From a5bb2dca9de76e0822a8bd257eb8bc4105a52190 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 25 Jun 2024 10:45:30 +0000 Subject: [PATCH 01/98] ADCM-5681 Alternative Audit Implementation via Middleware Changed: 1. Audit in `api_v2.cluster.views` Added: 1. Alternative audit implementation in `audit.alt` --- pyproject.toml | 4 + python/adcm/settings.py | 1 + python/api_v2/cluster/permissions.py | 2 - python/api_v2/cluster/views.py | 27 +- python/api_v2/host/permissions.py | 1 - python/api_v2/host/views.py | 22 +- python/api_v2/tests/base.py | 15 +- .../api_v2/tests/test_audit/test_cluster.py | 11 - python/api_v2/utils/__init__.py | 11 + python/api_v2/utils/audit.py | 149 +++++++++++ python/audit/alt/__init__.py | 11 + python/audit/alt/api.py | 161 ++++++++++++ python/audit/alt/core.py | 200 +++++++++++++++ python/audit/alt/hooks.py | 234 ++++++++++++++++++ python/audit/alt/middleware.py | 70 ++++++ python/audit/alt/object_retrievers.py | 72 ++++++ python/audit/utils.py | 1 + 17 files changed, 959 insertions(+), 33 deletions(-) create mode 100644 python/api_v2/utils/__init__.py create mode 100644 python/api_v2/utils/audit.py create mode 100644 python/audit/alt/__init__.py create mode 100644 python/audit/alt/api.py create mode 100644 python/audit/alt/core.py create mode 100644 python/audit/alt/hooks.py create mode 100644 python/audit/alt/middleware.py create mode 100644 python/audit/alt/object_retrievers.py diff --git a/pyproject.toml b/pyproject.toml index 39586e1b72..2ca62d75e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -93,6 +93,10 @@ order-by-type = true "python/cm/migrations/*" = ["ARG001", "N806", "N999"] "python/audit/migrations/*" = ["ARG001", "N806", "N999"] "python/rbac/migrations/*" = ["ARG001", "N806", "N999"] +# Hooks must implement specific interface, and it's visually convenient to name them as functions. 
+# ARG001 - interface implementations may not use all arguments required by that interface +# N801 - hooks that inherit from the convenience class are also classes, but should be perceived as functions +"python/audit/alt/hooks.py" = ["ARG001", "N801"] [tool.ruff.lint] ignore = [ diff --git a/python/adcm/settings.py b/python/adcm/settings.py index 449e852f22..c7f54db232 100644 --- a/python/adcm/settings.py +++ b/python/adcm/settings.py @@ -102,6 +102,7 @@ "django.contrib.messages.middleware.MessageMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware", "djangorestframework_camel_case.middleware.CamelCaseMiddleWare", + "audit.alt.middleware.AuditMiddleware", ] if not DEBUG: MIDDLEWARE = [*MIDDLEWARE, "csp.middleware.CSPMiddleware"] diff --git a/python/api_v2/cluster/permissions.py b/python/api_v2/cluster/permissions.py index c4b382ec7b..9e78c570c9 100644 --- a/python/api_v2/cluster/permissions.py +++ b/python/api_v2/cluster/permissions.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from audit.utils import audit from rest_framework.permissions import DjangoObjectPermissions @@ -25,7 +24,6 @@ class ClusterPermissions(DjangoObjectPermissions): "DELETE": ["%(app_label)s.delete_%(model_name)s"], } - @audit def has_permission(self, request, view) -> bool: if ( view.action in ["destroy", "update", "partial_update", "ansible_config"] diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index abe1d30962..67bf97a384 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -18,7 +18,8 @@ check_custom_perm, get_object_for_user, ) -from audit.utils import audit +from audit.alt.api import audit_create, audit_delete, audit_update +from audit.alt.hooks import extract_current_from_response, extract_previous_from_object, only_on_success from cm.api import add_cluster, delete_cluster from cm.errors import AdcmEx from cm.models import ( @@ -73,6 +74,7 @@ from api_v2.component.serializers import ComponentMappingSerializer from api_v2.config.utils import ConfigSchemaMixin from api_v2.host.serializers import HostMappingSerializer +from api_v2.utils.audit import cluster_from_lookup, cluster_from_response, update_cluster_name from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -181,7 +183,7 @@ def get_serializer_class(self): description="Creates of a new ADCM cluster.", responses={201: ClusterSerializer, 400: ErrorSerializer, 403: ErrorSerializer, 409: ErrorSerializer}, ) - @audit + @audit_create(name="Cluster created", object_=cluster_from_response) def create(self, request, *args, **kwargs): # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -210,7 +212,14 @@ def create(self, request, *args, **kwargs): # noqa: ARG002 409: ErrorSerializer, }, ) - @audit + @( + audit_update(name="Cluster updated", object_=cluster_from_lookup) + .attach_hooks(on_collect=only_on_success(update_cluster_name)) + .track_changes( + 
before=extract_previous_from_object(Cluster, "name", "description"), + after=extract_current_from_response("name", "description"), + ) + ) def partial_update(self, request, *args, **kwargs): # noqa: ARG002 instance = self.get_object() serializer = self.get_serializer(data=request.data) @@ -235,13 +244,9 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 operation_id="deleteCluster", summary="DELETE cluster", description="Delete a specific ADCM cluster.", - responses={ - 204: None, - 403: ErrorSerializer, - 404: ErrorSerializer, - }, + responses={204: None, 403: ErrorSerializer, 404: ErrorSerializer}, ) - @audit + @audit_delete(name="Cluster deleted", object_=cluster_from_lookup, removed_on_success=True) def destroy(self, request, *args, **kwargs): # noqa: ARG002 cluster = self.get_object() delete_cluster(cluster=cluster) @@ -341,7 +346,7 @@ def hosts_statuses(self, request: Request, *args, **kwargs) -> Response: # noqa 409: ErrorSerializer, }, ) - @audit + @audit_update(name="Host-Component map updated", object_=cluster_from_lookup) @action( methods=["get", "post"], detail=True, @@ -446,7 +451,7 @@ def mapping_components(self, request: Request, *args, **kwargs): # noqa: ARG002 HTTP_409_CONFLICT: ErrorSerializer, }, ) - @audit + @audit_update(name="Ansible configuration updated", object_=cluster_from_lookup) @action(methods=["get", "post"], detail=True, pagination_class=None, filter_backends=[], url_path="ansible-config") def ansible_config(self, request: Request, *args, **kwargs): # noqa: ARG002 cluster = self.get_object() diff --git a/python/api_v2/host/permissions.py b/python/api_v2/host/permissions.py index 3f9d376ab6..da85b88a4b 100644 --- a/python/api_v2/host/permissions.py +++ b/python/api_v2/host/permissions.py @@ -63,7 +63,6 @@ class HostsClusterPermissions(DjangoObjectPermissions): "DELETE": ["%(app_label)s.delete_%(model_name)s"], } - @audit def has_permission(self, request, view) -> bool: if view.action in ["create", "destroy"]: return 
True diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index fd446fe65f..4200376041 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -22,6 +22,8 @@ check_custom_perm, get_object_for_user, ) +from audit.alt.api import audit_update +from audit.alt.hooks import adjust_denied_on_404_result, extract_current_from_response, extract_previous_from_object from audit.utils import audit from cm.api import delete_host, remove_host_from_cluster from cm.errors import AdcmEx @@ -71,6 +73,13 @@ HostUpdateSerializer, ) from api_v2.host.utils import create_host, maintenance_mode, process_config_issues_policies_hc +from api_v2.utils.audit import ( + host_from_lookup, + nested_host_does_exist, + parent_cluster_from_lookup, + set_add_hosts_name, + set_removed_host_name, +) from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -427,7 +436,7 @@ def get_queryset(self, *args, **kwargs): # noqa: ARG002 return by_cluster_qs - @audit + @audit_update(name="Hosts added", object_=parent_cluster_from_lookup).attach_hooks(pre_call=set_add_hosts_name) def create(self, request, *_, **kwargs): cluster = get_object_for_user( user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=kwargs["cluster_pk"] @@ -473,7 +482,11 @@ def create(self, request, *_, **kwargs): data=HostSerializer(instance=qs_for_added_hosts.first(), context=context).data, ) - @audit + @( + audit_update(name="Host removed", object_=parent_cluster_from_lookup).attach_hooks( + pre_call=set_removed_host_name, on_collect=adjust_denied_on_404_result(objects_exist=nested_host_does_exist) + ) + ) def destroy(self, request, *args, **kwargs): # noqa: ARG002 host = self.get_object() cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) @@ -481,7 +494,10 @@ def destroy(self, request, *args, **kwargs): # noqa: ARG002 remove_host_from_cluster(host=host) return Response(status=HTTP_204_NO_CONTENT) - @audit + @audit_update(name="Host 
updated", object_=host_from_lookup).track_changes( + before=extract_previous_from_object(Host, "maintenance_mode"), + after=extract_current_from_response("maintenance_mode"), + ) @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 return maintenance_mode(request=request, host=self.get_object()) diff --git a/python/api_v2/tests/base.py b/python/api_v2/tests/base.py index b5be79b3bd..2bf1d307c6 100644 --- a/python/api_v2/tests/base.py +++ b/python/api_v2/tests/base.py @@ -117,16 +117,21 @@ def check_last_audit_record( if model is AuditLog: kwargs.setdefault("user__username", "admin") + object_changes = kwargs.pop("object_changes", {}) + + expected_record = model.objects.filter(**kwargs).order_by("pk").last() + self.assertIsNotNone(expected_record, "Can't find audit record") + self.assertEqual(last_audit_record.pk, expected_record.pk, "Expected audit record is not last") + # Object changes are {} for most cases, # we always want to check it, but providing it each time is redundant. # But sometimes structure is too complex for sqlite/ORM to handle, # so we have to check changes separately. 
+ # + # Check is on equality after retrieve for more clear message + # and to avoid object changes filtering if (model is AuditLog) and expect_object_changes_: - kwargs.setdefault("object_changes", {}) - - expected_record = model.objects.filter(**kwargs).order_by("pk").last() - self.assertIsNotNone(expected_record, "Can't find audit record") - self.assertEqual(last_audit_record.pk, expected_record.pk, "Expected audit record is not last") + self.assertDictEqual(expected_record.object_changes, object_changes) return last_audit_record diff --git a/python/api_v2/tests/test_audit/test_cluster.py b/python/api_v2/tests/test_audit/test_cluster.py index 10263d84f8..27ccf848b4 100644 --- a/python/api_v2/tests/test_audit/test_cluster.py +++ b/python/api_v2/tests/test_audit/test_cluster.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from audit.models import AuditObject from cm.models import ( Action, AnsibleConfig, @@ -203,16 +202,6 @@ def test_edit_not_found_fail(self): ) def test_delete_success(self): - # audit object should exist before successful DELETE request - # to have `is_deleted` updated - # for now we've agreed that's ok tradeoff - AuditObject.objects.get_or_create( - object_id=self.cluster_1.pk, - object_name=self.cluster_1.name, - object_type="cluster", - is_deleted=False, - ) - response = self.client.delete( path=reverse(viewname="v2:cluster-detail", kwargs={"pk": self.cluster_1.pk}), ) diff --git a/python/api_v2/utils/__init__.py b/python/api_v2/utils/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/utils/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py new file mode 100644 index 0000000000..147d5d6a16 --- /dev/null +++ b/python/api_v2/utils/audit.py @@ -0,0 +1,149 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from contextlib import suppress +from dataclasses import dataclass +from functools import partial +import json + +from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result +from audit.alt.hooks import AuditHook +from audit.alt.object_retrievers import GeneralAuditObjectRetriever +from audit.models import AuditObject, AuditObjectType +from cm.models import Cluster, Host +from django.db.models import Model +from rest_framework.response import Response + +# object retrievers + + +@dataclass(slots=True) +class ExtractID: + field: str + + def from_response(self, call_arguments: AuditedCallArguments, result: Result | None): # noqa: ARG002 + if not isinstance(result, Response): + return None + + return result.data.get(self.field) + + def from_lookup_kwargs(self, call_arguments: AuditedCallArguments, result: Response | None): # noqa: ARG002 + return call_arguments.get(self.field) + + +@dataclass(slots=True) +class CMAuditObjectCreator: + cm_model: type[Model] + name_field: str = "name" + + def __call__(self, id_: str | int, audit_object_type: AuditObjectType) -> AuditObject | None: + name = self.get_name(id_=id_) + if not name: + return None + + return AuditObject.objects.create(object_id=id_, object_type=audit_object_type, object_name=name) + + def get_name(self, id_: str | int) -> str | None: + return self.cm_model.objects.values_list(self.name_field, flat=True).filter(id=id_).first() + + +create_audit_cluster_object = CMAuditObjectCreator(cm_model=Cluster) +create_audit_host_object = CMAuditObjectCreator(cm_model=Host, name_field="fqdn") + +_extract_cluster_from = partial( + GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.CLUSTER, create_new=create_audit_cluster_object +) +cluster_from_response = _extract_cluster_from(extract_id=ExtractID(field="id").from_response) +cluster_from_lookup = _extract_cluster_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +parent_cluster_from_lookup = 
_extract_cluster_from(extract_id=ExtractID(field="cluster_pk").from_lookup_kwargs) + +host_from_lookup = GeneralAuditObjectRetriever( + audit_object_type=AuditObjectType.HOST, + extract_id=ExtractID(field="pk").from_lookup_kwargs, + create_new=create_audit_host_object, +) + + +# hooks + + +def update_cluster_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> None: + _ = call_arguments, result, exception + + if not context.object: + return + + instance = context.object + + new_name = Cluster.objects.values_list("name", flat=True).filter(id=instance.object_id).first() + if not new_name: + return + + instance.object_name = new_name + instance.save(update_fields=["object_name"]) + + +# hook helpers / special functions + + +def object_does_exist(hook: AuditHook, model: type[Model], id_field: str = "pk") -> bool: + id_ = hook.call_arguments.get(id_field) + if not id_: + # it's quite a stretch, but I don't see an alternative way for a safe implementation here + return False + + return model.objects.filter(id=id_).exists() + + +def nested_host_does_exist(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=Host) + + +# name changers + + +class set_add_hosts_name(AuditHook): # noqa: N801 + def __call__(self): + request = self.call_arguments.get("request", "") + + data = None + # if body was already read without assigning to `request._data`, + # those exceptions won't be enough to silence, + # but if such a problem will occur, it should be addressed more thoughtfully than just suppress + with suppress(AttributeError, json.JSONDecodeError): + data = json.loads(request.body) + + host_fqdn = "" + if isinstance(data, list): + # we may want to consider both naming styles here, but just v2-like camelCase for now + ids = (entry.get("hostId", entry.get("host_id")) for entry in data if isinstance(entry, dict)) + host_fqdn = ", 
".join(sorted(Host.objects.filter(id__in=ids).values_list("fqdn", flat=True))) + elif isinstance(data, dict) and (host_id := data.get("hostId", data.get("host_id"))) is not None: + host_fqdn = Host.objects.values_list("fqdn", flat=True).filter(id=host_id).first() or "" + + self.context.name = f"[{host_fqdn}] host(s) added" + + +class set_removed_host_name(AuditHook): # noqa: N801 + def __call__(self): + host_id = self.call_arguments.get("pk") + + if not host_id: + return + + fqdn = Host.objects.values_list("fqdn", flat=True).filter(id=host_id).first() or "" + self.context.name = f"{fqdn} host removed".strip() diff --git a/python/audit/alt/__init__.py b/python/audit/alt/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/audit/alt/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/audit/alt/api.py b/python/audit/alt/api.py new file mode 100644 index 0000000000..96e6270ad7 --- /dev/null +++ b/python/audit/alt/api.py @@ -0,0 +1,161 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC +from dataclasses import dataclass +from functools import lru_cache +from typing import Any, Callable, Iterable, ParamSpec + +from rest_framework.response import Response +from typing_extensions import Self + +from audit.alt.core import AuditHookFunc, Hooks, OperationAuditContext, RetrieveAuditObjectFunc +from audit.alt.hooks import ( + cleanup_changes, + collect_meta, + detect_request_user, + mark_object_as_deleted_on_success, + only_on_success, + retriever_as_hook, + set_api_operation_result, +) +from audit.alt.object_retrievers import ignore_object_search +from audit.models import AuditLogOperationType + +P = ParamSpec("P") + +AUDITED_HTTP_METHODS = frozenset(("POST", "DELETE", "PUT", "PATCH")) + + +class APIOperationAuditContext(OperationAuditContext): + DEFAULT_HOOKS = Hooks(pre_call=(detect_request_user, collect_meta), on_collect=(set_api_operation_result,)) + + +@dataclass(slots=True, frozen=True) +class AuditedEndpointConfig: + operation_type: AuditLogOperationType + operation_name: str + retrieve_object_func: RetrieveAuditObjectFunc + hooks: Hooks + + +class AuditEndpointsRegistry: + """ + Registry of view functions that should be audited. + Used to match caller func and audit configuration in runtime (usually middleware). + + Key extraction is bound to usages, so it may have to be adjusted/extended in the future. + """ + + __slots__ = ("_endpoints",) + + def __init__(self): + self._endpoints: dict[str, AuditedEndpointConfig] = {} + + def register(self, func: Callable, config: AuditedEndpointConfig) -> None: + key = f"{getattr(func, '__module__', '-')}:{func.__qualname__}" + self._endpoints[key] = config + + def find_for_view(self, http_method: str, view_func: Any) -> AuditedEndpointConfig | None: + # view_func is not just simple Callable, it's special func prepared by Django's middleware system. 
+ # __qualname__ of view_func doesn't specify method (because it's View, not API method itself) + method_name = getattr(view_func, "actions", {}).get(http_method.lower(), "") + key = f"{getattr(view_func, '__module__', '-')}:{view_func.__qualname__}.{method_name}".rstrip(".") + return self._endpoints.get(key) + + +@lru_cache(maxsize=1) +def get_endpoints_registry() -> AuditEndpointsRegistry: + return AuditEndpointsRegistry() + + +class GenericAPIAuditDecorator: + """ + Decorator to wrap ViewSet's functions that should be audited. + Adds function to registry and returns function without changes. + Additional hooks may be configured after instantiation. + """ + + def __init__(self, name: str, type_: AuditLogOperationType, object_: RetrieveAuditObjectFunc): + self.operation_type = type_ + self.operation_name = name + self.retrieve_object_func = object_ + self.extra_pre_call_hooks = [] + self.extra_on_collect_hooks = [] + + self._registry = get_endpoints_registry() + + def __call__(self, func: Callable[P, Response]) -> Callable[P, Response]: + endpoint_config = AuditedEndpointConfig( + operation_type=self.operation_type, + operation_name=self.operation_name, + retrieve_object_func=self.retrieve_object_func, + hooks=Hooks(pre_call=tuple(self.extra_pre_call_hooks), on_collect=tuple(self.extra_on_collect_hooks)), + ) + + self._registry.register(func=func, config=endpoint_config) + + return func + + +class TypedAuditDecorator(GenericAPIAuditDecorator, ABC): + OPERATION_TYPE: AuditLogOperationType + + def __init__(self, name: str, object_: RetrieveAuditObjectFunc): + if not getattr(self, "OPERATION_TYPE", None): + message = "OPERATION_TYPE should be specified" + raise ValueError(message) + + super().__init__(name=name, type_=self.OPERATION_TYPE, object_=object_) + + def attach_hooks( + self, + pre_call: AuditHookFunc | Iterable[AuditHookFunc] = (), + on_collect: AuditHookFunc | Iterable[AuditHookFunc] = (), + ) -> Self: + self.extra_pre_call_hooks.extend(pre_call if not 
callable(pre_call) else (pre_call,)) + self.extra_on_collect_hooks.extend(on_collect if not callable(on_collect) else (on_collect,)) + + return self + + +class audit_create(TypedAuditDecorator): # noqa: N801 + OPERATION_TYPE = AuditLogOperationType.CREATE + + +class audit_update(TypedAuditDecorator): # noqa: N801 + OPERATION_TYPE = AuditLogOperationType.UPDATE + + def track_changes(self, before: AuditHookFunc, after: AuditHookFunc) -> Self: + """Shouldn't be called more than 1 time, isn't adopted for that""" + + self.extra_pre_call_hooks.append(before) + self.extra_on_collect_hooks.append(only_on_success(after)) + self.extra_on_collect_hooks.append(cleanup_changes) + + return self + + +class audit_delete(TypedAuditDecorator): # noqa: N801 + OPERATION_TYPE = AuditLogOperationType.DELETE + + def __init__(self, name: str, object_: RetrieveAuditObjectFunc, removed_on_success: bool = False): + retrieve_func, pre_hooks, collect_hooks = object_, (), () + if removed_on_success: + retrieve_func = ignore_object_search + pre_hooks = (retriever_as_hook(object_),) + collect_hooks = (mark_object_as_deleted_on_success,) + + super().__init__(name=name, object_=retrieve_func) + + self.extra_pre_call_hooks.extend(pre_hooks) + self.extra_on_collect_hooks.extend(collect_hooks) diff --git a/python/audit/alt/core.py b/python/audit/alt/core.py new file mode 100644 index 0000000000..a5c3e762ed --- /dev/null +++ b/python/audit/alt/core.py @@ -0,0 +1,200 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import UserDict +from dataclasses import dataclass, field + +from typing_extensions import Protocol, Self, TypeVar + +from audit.cef_logger import cef_logger as write_cef_log +from audit.models import AuditLog, AuditLogOperationResult, AuditLogOperationType, AuditObject, AuditUser + +Result = TypeVar("Result") + + +class AuditedCallArguments(UserDict): + """ + Simple and dummy implementation for "frozendict" to prevent accidental changes to call arguments + """ + + def __setitem__(self, key, value): + message = "Audit context can't be changed" + raise ValueError(message) + + def set(self, from_dict: dict) -> Self: + """The only "correct" way to assign data to the context dict""" + self.data = from_dict + return self + + +@dataclass(slots=True, frozen=False) +class AuditedCall: + arguments: AuditedCallArguments = field(default_factory=AuditedCallArguments) + result: Result | None = None + exception: Exception | None = None + + +@dataclass(slots=True, frozen=False) +class OperationMeta: + address: str | None = None + agent: str = "" + changes: dict = field(default_factory=dict) + + +@dataclass(slots=True, frozen=True) +class AuditSignature: + id: str + type: AuditLogOperationType + + +class AuditHookFunc(Protocol): + def __call__( + self, + *, + context: "OperationAuditContext", + call_arguments: AuditedCallArguments, + result: Result | None, + exception: Exception | None, + ): + ... + + +class RetrieveAuditObjectFunc(Protocol): + def __call__( + self, + *, + context: "OperationAuditContext", + call_arguments: AuditedCallArguments, + result: Result | None, + exception: Exception | None, + ) -> AuditObject | None: + ... + + +@dataclass(slots=True, frozen=True) +class Hooks: + pre_call: tuple[AuditHookFunc, ...] 
= () + """ + Though `pre_call` hooks have `result` and `exception` arguments, + but they should be expected to be `None` always + """ + + on_collect: tuple[AuditHookFunc, ...] = () + + def __add__(self, other: "Hooks") -> Self: + if not isinstance(other, Hooks): + message = f"{other} should be of `Hooks` type" + raise TypeError(message) + + return Hooks(pre_call=(*self.pre_call, *other.pre_call), on_collect=(*self.on_collect, *other.on_collect)) + + +class OperationAuditContext: + """ + Audited operation context that accumulates input arguments and allow basic flow control. + + Actual behavior and attributes changes should be configured via hooks. + DEFAULT_HOOKS have priority over user-provided. + Knowledge of what work is performed on which method calls is advised. + + Also note that: + - Pre-call hooks have `result` and `exception` equal to None + - Though `retrieve_object` and hooks have very similar API, it isn't expected from hook to return anything, + but `retrieve_object` is expected to return value or None if it can't be returned + - `retrieve_object`'s return value is assigned to `object` attribute unconditionally during `collect` call + - Hooks should be as safe as possible (raise no Exceptions) + """ + + DEFAULT_HOOKS = Hooks() + + name: str + result: AuditLogOperationResult + meta: OperationMeta + + object: AuditObject | None + user: AuditUser | None + + _call: AuditedCall + _retrieve_object: RetrieveAuditObjectFunc + + def __init__( + self, + signature: AuditSignature, + default_name: str, + retrieve_object: RetrieveAuditObjectFunc, + custom_hooks: Hooks, + ): + self._default_name = default_name + self._signature = signature + self._hooks = self.DEFAULT_HOOKS + custom_hooks + self._retrieve_object = retrieve_object + + self.restore_defaults() + + @property + def signature(self) -> AuditSignature: + return self._signature + + def restore_defaults(self) -> Self: + self.name = self._default_name + self.result = AuditLogOperationResult.FAIL + self.object: 
AuditObject | None = None + self.user: AuditUser | None = None + self.meta = OperationMeta() + self._call = AuditedCall() + return self + + def attach_call_arguments(self, arguments: dict) -> Self: + self._call.arguments.set(from_dict=arguments) + return self + + def attach_result(self, result: Result | None) -> Self: + self._call.result = result + return self + + def attach_exception(self, exception: Exception | None) -> Self: + self._call.exception = exception + return self + + def run_pre_call_hooks(self) -> Self: + for hook in self._hooks.pre_call: + hook(context=self, call_arguments=self._call.arguments, result=None, exception=None) + + return self + + def collect(self) -> Self: + self.object = self._retrieve_object( + context=self, call_arguments=self._call.arguments, result=self._call.result, exception=self._call.exception + ) + + for hook in self._hooks.on_collect: + hook( + context=self, + call_arguments=self._call.arguments, + result=self._call.result, + exception=self._call.exception, + ) + + return self + + def save(self) -> None: + record = AuditLog.objects.create( + audit_object=self.object, + operation_name=self.name, + operation_type=self._signature.type, + operation_result=self.result, + user=self.user, + object_changes=self.meta.changes, + address=self.meta.address, + agent=self.meta.agent, + ) + write_cef_log(audit_instance=record, signature_id=self._signature.id) diff --git a/python/audit/alt/hooks.py b/python/audit/alt/hooks.py new file mode 100644 index 0000000000..c1232e8547 --- /dev/null +++ b/python/audit/alt/hooks.py @@ -0,0 +1,234 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from functools import wraps +from typing import Callable + +from django.contrib.auth.models import User as DjangoUser +from django.core.handlers.wsgi import WSGIRequest +from django.db.models import F, Model +from rest_framework.response import Response +from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND + +from audit.alt.core import AuditedCallArguments, AuditHookFunc, OperationAuditContext, Result, RetrieveAuditObjectFunc +from audit.models import AuditLogOperationResult, AuditUser +from audit.utils import get_client_agent, get_client_ip + + +class AuditHook: + """ + Convenience hook implementation to avoid specifying arguments each time. + Accepts all regular arguments the hook will get in constructor, + assigns them to attributes, then calls itself. + + This hook that does nothing if `__call__` isn't re-implemented. + """ + + __slots__ = ("context", "call_arguments", "result", "exception") + + def __init__( + self, + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, + exception: Exception | None, + ): + self.context = context + self.call_arguments = call_arguments + self.result = result + self.exception = exception + self() + + def __call__(self): + ... 
+ + +# decorators to prepare / enhance / change hook + + +def only_on_success(func: AuditHookFunc) -> AuditHookFunc: + @wraps(func) + def wrapped( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, + ): + if context.result != AuditLogOperationResult.SUCCESS: + return None + + return func(context=context, call_arguments=call_arguments, result=result, exception=exception) + + return wrapped + + +def only_on_fail(func: AuditHookFunc) -> AuditHookFunc: + @wraps(func) + def wrapped( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, + ): + if context.result == AuditLogOperationResult.SUCCESS: + return None + + return func(context=context, call_arguments=call_arguments, result=result, exception=exception) + + return wrapped + + +def retriever_as_hook(func: RetrieveAuditObjectFunc) -> AuditHookFunc: + """Call hook and assign its return value to object""" + + @wraps(func) + def wrapped( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, + ) -> None: + context.object = func(context=context, call_arguments=call_arguments, result=result, exception=exception) + + return wrapped + + +# basic hooks and hook builders + + +class cleanup_changes(AuditHook): + """ + Clean up object changes stored in meta. + Affects `current` and `previous` keys. 
+ """ + + def __call__(self): + changes = self.context.meta.changes + + if not changes: + return + + current = changes.pop("current", {}) + previous = changes.pop("previous", {}) + + if not (current or previous): + return + + keys_to_remove = deque(maxlen=len(current)) + + for key in current: + # Since most extraction functions takes values from request/response, + # "current" (being built from response) may have more keys than "previous" (due to PATCH), + # so we need to remove this keys, because they weren't changed + if key not in previous: + keys_to_remove.append(key) + + elif previous[key] == current[key]: + keys_to_remove.append(key) + previous.pop(key) + + for key in keys_to_remove: + current.pop(key) + + if current and previous: + self.context.meta.changes |= {"previous": previous, "current": current} + + +class detect_request_user(AuditHook): + def __call__(self): + request = self.call_arguments.get("request") + if not hasattr(request, "user"): + return + + if isinstance(request.user, DjangoUser): + self.context.user = AuditUser.objects.filter(username=request.user.username).order_by("-pk").first() + else: + self.context.user = None + + +class collect_meta(AuditHook): + def __call__(self): + request = self.call_arguments.get("request") + if not isinstance(request, WSGIRequest): + return + + self.context.meta.address = get_client_ip(request) + self.context.meta.agent = get_client_agent(request) + + +class set_api_operation_result(AuditHook): + def __call__(self): + # maybe set result will require something like "ensure object exists" on pre or collect hook before it, + # but most likely `context.object` will be filled one way or another and will be enough for this function + + if not isinstance(self.result, Response): + return + + if self.result.status_code < HTTP_400_BAD_REQUEST: + self.context.result = AuditLogOperationResult.SUCCESS + elif self.result.status_code in (HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN) or ( + self.result.status_code == 
HTTP_404_NOT_FOUND and self.context.object and not self.context.object.is_deleted + ): + self.context.result = AuditLogOperationResult.DENIED + else: + self.context.result = AuditLogOperationResult.FAIL + + +def adjust_denied_on_404_result(objects_exist: Callable[[AuditHook], bool]) -> AuditHookFunc: + class HookImpl(set_api_operation_result): + def __call__(self): + if self.context.result == AuditLogOperationResult.DENIED and not objects_exist(self): + self.context.result = AuditLogOperationResult.FAIL + + return HookImpl + + +@only_on_success +class mark_object_as_deleted_on_success(AuditHook): + def __call__(self): + if not self.context.object: + return + + self.context.object.is_deleted = True + self.context.object.save(update_fields=["is_deleted"]) + + +# hook builders + + +def extract_previous_from_object( + model: type[Model], *fields: str, id_field_: str = "pk", **named_fields: F +) -> AuditHookFunc: + class HookImpl(AuditHook): + def __call__(self): + id_ = self.call_arguments.get(id_field_) + if not id_: + return + + self.context.meta.changes["previous"] = ( + model.objects.values(*fields, **named_fields).filter(id=id_).first() or {} + ) + + return HookImpl + + +def extract_current_from_response(*fields: str) -> AuditHookFunc: + class HookImpl(AuditHook): + def __call__(self): + if not isinstance(self.result, Response): + return + + data = self.result.data + self.context.meta.changes["current"] = {field: data[field] for field in fields if field in data} + + return HookImpl diff --git a/python/audit/alt/middleware.py b/python/audit/alt/middleware.py new file mode 100644 index 0000000000..1b7fa79f66 --- /dev/null +++ b/python/audit/alt/middleware.py @@ -0,0 +1,70 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.urls import resolve + +from audit.alt.api import ( + AUDITED_HTTP_METHODS, + APIOperationAuditContext, + AuditedEndpointConfig, + AuditEndpointsRegistry, + get_endpoints_registry, +) +from audit.alt.core import AuditSignature + + +class AuditMiddleware: + """ + Audit controller functions that are registered in AuditEndpointsRegistry. + + Since `process_view` is executed previously to actual response handling, + it will handle pre-request hooks and audit context preparation. + + Sync only (see `skip_audit` and `current_audit_context` usage). + """ + + def __init__(self, get_response): + self.get_response = get_response + + self.audited_endpoints_registry: AuditEndpointsRegistry = get_endpoints_registry() + + self.skip_audit = False + self.current_audit_context: APIOperationAuditContext | None = None + + def __call__(self, request): + self.current_audit_context = None + self.skip_audit = request.method not in AUDITED_HTTP_METHODS + + response = self.get_response(request) + if self.skip_audit or self.current_audit_context is None: + return response + + self.current_audit_context.attach_result(result=response).collect().save() + + return response + + def process_view(self, request, view_func, view_args, view_kwargs): # noqa: ARG002 + endpoint_config: AuditedEndpointConfig | None = self.audited_endpoints_registry.find_for_view( + http_method=request.method, view_func=view_func + ) + if not endpoint_config: + return + + signature = AuditSignature(id=resolve(request.path).route, type=endpoint_config.operation_type) + self.current_audit_context = 
APIOperationAuditContext( + signature=signature, + default_name=endpoint_config.operation_name, + retrieve_object=endpoint_config.retrieve_object_func, + custom_hooks=endpoint_config.hooks, + ) + self.current_audit_context.attach_call_arguments(arguments=view_kwargs | {"request": request}) + self.current_audit_context.run_pre_call_hooks() diff --git a/python/audit/alt/object_retrievers.py b/python/audit/alt/object_retrievers.py new file mode 100644 index 0000000000..bb94d9f084 --- /dev/null +++ b/python/audit/alt/object_retrievers.py @@ -0,0 +1,72 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from typing import Callable, Protocol + +from rest_framework.response import Response + +from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result +from audit.models import AuditObject, AuditObjectType + + +def ignore_object_search( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> AuditObject | None: + """Do not attempt to search for object (e.g. object will be deleted after request is finished)""" + _ = call_arguments, result, exception + + return context.object + + +class ExtractAuditObjectIDFunc(Protocol): + def __call__(self, call_arguments: AuditedCallArguments, result: Result | None) -> str | int | None: + ... + + +@dataclass(slots=True) +class GeneralAuditObjectRetriever: + """ + Unification of object retrieval process: + 1. 
Try to get id + 2. Try to retrieve audit object by this ID and audit object type + 3. On retrieval fail, call create function (it is expected to return Audit Object if it can be created) + """ + + audit_object_type: AuditObjectType + + extract_id: ExtractAuditObjectIDFunc + create_new: Callable[[str, AuditObjectType], AuditObject | None] + + is_deleted: bool = False + + def __call__( + self, + context: "OperationAuditContext", # noqa: ARG002 + call_arguments: AuditedCallArguments, + result: Result | None, + exception: Exception | None, # noqa: ARG002 + ) -> AuditObject | None: + id_ = self.extract_id(call_arguments=call_arguments, result=result) + if not id_: + return None + + audit_object = AuditObject.objects.filter( + object_id=id_, object_type=self.audit_object_type, is_deleted=self.is_deleted + ).first() + if audit_object: + return audit_object + + return self.create_new(id_, self.audit_object_type) diff --git a/python/audit/utils.py b/python/audit/utils.py index 70d358a880..1cb96ca6eb 100644 --- a/python/audit/utils.py +++ b/python/audit/utils.py @@ -520,6 +520,7 @@ def wrapped(*args, **kwargs): return res # Correctly finished request (when will be `bool(res) is False`?) 
+ # - when has_permission is decorated and it returns False status_code = res.status_code if res else HTTP_403_FORBIDDEN except (AdcmEx, ValidationError, Http404, NotFound) as exc: error = exc From 75804b2c1c63d0dae0b9639087e817f419b557cf Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 25 Jun 2024 13:48:47 +0000 Subject: [PATCH 02/98] ADCM-5694 & ADCM-5695 Move imports/cluster host views to corresponding modules and audit them --- python/api_v2/api_schema.py | 41 +- python/api_v2/cluster/permissions.py | 18 + python/api_v2/cluster/serializers.py | 18 + python/api_v2/cluster/urls.py | 7 +- python/api_v2/cluster/views.py | 434 +++++++++++++----- .../api_v2/{imports => generic}/__init__.py | 0 python/api_v2/generic/imports/__init__.py | 11 + .../{ => generic}/imports/serializers.py | 0 python/api_v2/{ => generic}/imports/types.py | 0 python/api_v2/{ => generic}/imports/utils.py | 2 +- python/api_v2/generic/imports/views.py | 84 ++++ python/api_v2/host/filters.py | 10 +- python/api_v2/host/permissions.py | 18 - python/api_v2/host/serializers.py | 20 +- python/api_v2/host/views.py | 261 +---------- python/api_v2/imports/views.py | 9 +- python/api_v2/service/views.py | 118 +++-- python/api_v2/utils/audit.py | 22 +- python/audit/alt/api.py | 51 +- python/audit/alt/middleware.py | 8 +- 20 files changed, 610 insertions(+), 522 deletions(-) rename python/api_v2/{imports => generic}/__init__.py (100%) create mode 100644 python/api_v2/generic/imports/__init__.py rename python/api_v2/{ => generic}/imports/serializers.py (100%) rename python/api_v2/{ => generic}/imports/types.py (100%) rename python/api_v2/{ => generic}/imports/utils.py (99%) create mode 100644 python/api_v2/generic/imports/views.py diff --git a/python/api_v2/api_schema.py b/python/api_v2/api_schema.py index e647ab540c..384a30b0b3 100644 --- a/python/api_v2/api_schema.py +++ b/python/api_v2/api_schema.py @@ -10,10 +10,20 @@ # See the License for the specific language governing permissions and # 
limitations under the License. +from typing import Iterable, TypeAlias + from adcm.serializers import EmptySerializer +from cm.models import ADCMEntityStatus from drf_spectacular.utils import OpenApiParameter from rest_framework.fields import CharField -from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT +from rest_framework.serializers import Serializer +from rest_framework.status import ( + HTTP_200_OK, + HTTP_400_BAD_REQUEST, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, +) class ErrorSerializer(EmptySerializer): @@ -26,6 +36,17 @@ class ErrorSerializer(EmptySerializer): DOCS_CLIENT_INPUT_ERROR_RESPONSES = {HTTP_400_BAD_REQUEST: ErrorSerializer, HTTP_409_CONFLICT: ErrorSerializer} +def status_param(required: bool) -> OpenApiParameter: + return OpenApiParameter( + name="status", + required=required, + location=OpenApiParameter.QUERY, + description="Case insensitive and partial filter by status.", + enum=ADCMEntityStatus.values, + type=str, + ) + + class DefaultParams: LIMIT = OpenApiParameter(name="limit", description="Number of records included in the selection.", type=int) OFFSET = OpenApiParameter(name="offset", description="Record number from which the selection starts.", type=int) @@ -37,6 +58,9 @@ class DefaultParams: type=str, ) + STATUS_REQUIRED = status_param(required=True) + STATUS_OPTIONAL = status_param(required=False) + @classmethod def ordering_by(cls, *values: str | tuple[str, str], **kwargs: str | bool | type) -> OpenApiParameter: return OpenApiParameter( @@ -44,3 +68,18 @@ def ordering_by(cls, *values: str | tuple[str, str], **kwargs: str | bool | type enum=values, **{attr: getattr(cls.ORDERING, attr) for attr in ("name", "required", "description", "type")} | kwargs, ) + + +ResponseOKType: TypeAlias = Serializer | type[Serializer] | type[dict] | type[list] | None + + +def responses( + success: ResponseOKType | tuple[int, ResponseOKType], errors: Iterable[int] | int 
+) -> dict[int, Serializer]: + if not isinstance(success, tuple): + success = (HTTP_200_OK, success) + + if isinstance(errors, int): + errors = (errors,) + + return {success[0]: success[1]} | {status: ErrorSerializer for status in errors} diff --git a/python/api_v2/cluster/permissions.py b/python/api_v2/cluster/permissions.py index 9e78c570c9..e35503ed77 100644 --- a/python/api_v2/cluster/permissions.py +++ b/python/api_v2/cluster/permissions.py @@ -43,3 +43,21 @@ def has_object_permission(self, request, view, obj) -> bool: self.perms_map["POST"] = ["%(app_label)s.add_%(model_name)s"] return super().has_object_permission(request=request, view=view, obj=obj) + + +class HostsClusterPermissions(DjangoObjectPermissions): + perms_map = { + "GET": [], + "OPTIONS": [], + "HEAD": [], + "POST": [], + "PUT": ["%(app_label)s.change_%(model_name)s"], + "PATCH": ["%(app_label)s.change_%(model_name)s"], + "DELETE": ["%(app_label)s.delete_%(model_name)s"], + } + + def has_permission(self, request, view) -> bool: + if view.action in ["create", "destroy"]: + return True + + return super().has_permission(request=request, view=view) diff --git a/python/api_v2/cluster/serializers.py b/python/api_v2/cluster/serializers.py index 146c75edf7..f5e067dd51 100644 --- a/python/api_v2/cluster/serializers.py +++ b/python/api_v2/cluster/serializers.py @@ -248,3 +248,21 @@ def to_representation(self, instance: AnsibleConfig) -> dict: data["config"]["defaults"]["forks"] = int(data["config"]["defaults"]["forks"]) return data + + +class RelatedHostComponentsStatusSerializer(WithStatusSerializer): + id = IntegerField(source="component.id") + name = CharField(source="component.name") + display_name = CharField(source="component.display_name") + + class Meta: + model = HostComponent + fields = ["id", "name", "display_name", "status"] + + +class ClusterHostStatusSerializer(EmptySerializer): + host_components = RelatedHostComponentsStatusSerializer(many=True, source="hostcomponent_set") + + class Meta: + 
model = Host + fields = ["host_components"] diff --git a/python/api_v2/cluster/urls.py b/python/api_v2/cluster/urls.py index 869a65eee5..84ef9f30e0 100644 --- a/python/api_v2/cluster/urls.py +++ b/python/api_v2/cluster/urls.py @@ -21,13 +21,12 @@ ActionHostGroupViewSet, HostActionHostGroupViewSet, ) -from api_v2.cluster.views import ClusterViewSet +from api_v2.cluster.views import ClusterImportViewSet, ClusterViewSet, HostClusterViewSet from api_v2.component.views import ComponentViewSet, HostComponentViewSet from api_v2.config.views import ConfigLogViewSet from api_v2.group_config.views import GroupConfigViewSet -from api_v2.host.views import HostClusterViewSet, HostGroupConfigViewSet -from api_v2.imports.views import ClusterImportViewSet, ServiceImportViewSet -from api_v2.service.views import ServiceViewSet +from api_v2.host.views import HostGroupConfigViewSet +from api_v2.service.views import ServiceImportViewSet, ServiceViewSet from api_v2.upgrade.views import UpgradeViewSet CLUSTER_PREFIX = "" diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index 67bf97a384..83e400eebe 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -10,17 +10,26 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Any, Collection + from adcm.permissions import ( VIEW_CLUSTER_PERM, VIEW_HC_PERM, VIEW_HOST_PERM, + VIEW_IMPORT_PERM, VIEW_SERVICE_PERM, + ChangeMMPermissions, check_custom_perm, get_object_for_user, ) -from audit.alt.api import audit_create, audit_delete, audit_update -from audit.alt.hooks import extract_current_from_response, extract_previous_from_object, only_on_success -from cm.api import add_cluster, delete_cluster +from audit.alt.api import audit_create, audit_delete, audit_update, audit_view +from audit.alt.hooks import ( + adjust_denied_on_404_result, + extract_current_from_response, + extract_previous_from_object, + only_on_success, +) +from cm.api import add_cluster, delete_cluster, remove_host_from_cluster from cm.errors import AdcmEx from cm.models import ( AnsibleConfig, @@ -33,6 +42,9 @@ Prototype, ServiceComponent, ) +from cm.services.cluster import perform_host_to_cluster_map +from cm.services.status import notify +from core.cluster.errors import HostAlreadyBoundError, HostBelongsToAnotherClusterError, HostDoesNotExistError from django.contrib.contenttypes.models import ContentType from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin @@ -51,17 +63,18 @@ HTTP_409_CONFLICT, ) -from api_v2.api_schema import ErrorSerializer +from api_v2.api_schema import DefaultParams, responses from api_v2.cluster.filters import ( ClusterFilter, ClusterHostFilter, ClusterServiceFilter, ) -from api_v2.cluster.permissions import ClusterPermissions +from api_v2.cluster.permissions import ClusterPermissions, HostsClusterPermissions from api_v2.cluster.serializers import ( AnsibleConfigRetrieveSerializer, AnsibleConfigUpdateSerializer, ClusterCreateSerializer, + ClusterHostStatusSerializer, ClusterSerializer, ClusterUpdateSerializer, MappingSerializer, @@ -73,12 +86,39 @@ from api_v2.cluster.utils import retrieve_mapping_data, save_mapping from api_v2.component.serializers 
import ComponentMappingSerializer from api_v2.config.utils import ConfigSchemaMixin -from api_v2.host.serializers import HostMappingSerializer -from api_v2.utils.audit import cluster_from_lookup, cluster_from_response, update_cluster_name +from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer +from api_v2.generic.imports.views import ImportViewSet +from api_v2.host.filters import HostMemberFilter +from api_v2.host.serializers import ( + HostAddSerializer, + HostChangeMaintenanceModeSerializer, + HostMappingSerializer, + HostSerializer, +) +from api_v2.host.utils import maintenance_mode +from api_v2.utils.audit import ( + cluster_from_lookup, + cluster_from_response, + host_from_lookup, + nested_host_does_exist, + parent_cluster_from_lookup, + set_add_hosts_name, + set_removed_host_name, + update_cluster_name, +) from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @extend_schema_view( + create=extend_schema( + operation_id="postCluster", + summary="POST cluster", + description="Creates of a new ADCM cluster.", + responses=responses( + success=(HTTP_201_CREATED, ClusterSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_409_CONFLICT), + ), + ), list=extend_schema( summary="GET clusters", description="Get a list of ADCM clusters with information on them.", @@ -88,54 +128,67 @@ summary="GET cluster", description="Get information about a specific cluster.", operation_id="getCluster", - responses={ - 200: ClusterSerializer, - 404: ErrorSerializer, - }, + responses=responses(success=ClusterSerializer, errors=HTTP_404_NOT_FOUND), + ), + partial_update=extend_schema( + operation_id="patchCluster", + summary="PATCH cluster", + description="Change cluster name.", + responses=responses( + success=ClusterSerializer, + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + destroy=extend_schema( + operation_id="deleteCluster", + summary="DELETE cluster", + 
description="Delete a specific ADCM cluster.", + responses=responses(success=None, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), ), services_statuses=extend_schema( operation_id="getClusterServiceStatuses", summary="GET cluster service statuses", description="Get information about cluster service statuses.", - responses={200: RelatedServicesStatusesSerializer, 404: ErrorSerializer}, - parameters=[ - OpenApiParameter( - name="status", - required=True, - location=OpenApiParameter.QUERY, - description="Case insensitive and partial filter by status.", - type=str, - ), - OpenApiParameter( - name="clusterId", - required=True, - location=OpenApiParameter.PATH, - description="Cluster id.", - type=int, - ), - ], + responses=responses(success=RelatedServicesStatusesSerializer, errors=HTTP_404_NOT_FOUND), + parameters=[DefaultParams.STATUS_REQUIRED], + ), + service_prototypes=extend_schema( + operation_id="getServicePrototypes", + summary="GET service prototypes", + description="Get service prototypes that is related to this cluster.", + responses=responses(success=ServicePrototypeSerializer(many=True), errors=HTTP_404_NOT_FOUND), + ), + service_candidates=extend_schema( + operation_id="getServiceCandidates", + summary="GET service candidates", + description="Get service prototypes that can be added to this cluster.", + responses=responses(success=ServicePrototypeSerializer(many=True), errors=HTTP_404_NOT_FOUND), ), hosts_statuses=extend_schema( operation_id="getClusterHostStatuses", summary="Get information about cluster host statuses.", description="Get information about cluster service statuses.", - responses={200: RelatedServicesStatusesSerializer, 403: ErrorSerializer, 404: ErrorSerializer}, - parameters=[ - OpenApiParameter( - name="status", - required=True, - location=OpenApiParameter.QUERY, - description="Case insensitive and partial filter by status.", - type=str, - ), - OpenApiParameter( - name="clusterId", - required=True, - location=OpenApiParameter.PATH, - 
description="Cluster id.", - type=int, - ), - ], + responses=responses(success=RelatedServicesStatusesSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + parameters=[DefaultParams.STATUS_REQUIRED], + ), + mapping_hosts=extend_schema( + operation_id="getMappingHosts", + summary="GET mapping hosts", + description="Get a list of hosts to map.", + responses=responses(success=HostMappingSerializer(many=True), errors=HTTP_404_NOT_FOUND), + ), + mapping_components=extend_schema( + operation_id="getMappingComponents", + summary="GET mapping components", + description="Get a list of components to map.", + responses=responses(success=ComponentMappingSerializer, errors=HTTP_404_NOT_FOUND), + ), + ansible_config_schema=extend_schema( + methods=["get"], + operation_id="getClusterAnsibleConfigs", + summary="GET cluster ansible configuration", + description="Get information about cluster ansible config.", + responses=responses(success=dict, errors=HTTP_404_NOT_FOUND), ), ) class ClusterViewSet( @@ -177,12 +230,6 @@ def get_serializer_class(self): case _: return ClusterSerializer - @extend_schema( - operation_id="postCluster", - summary="POST cluster", - description="Creates of a new ADCM cluster.", - responses={201: ClusterSerializer, 400: ErrorSerializer, 403: ErrorSerializer, 409: ErrorSerializer}, - ) @audit_create(name="Cluster created", object_=cluster_from_response) def create(self, request, *args, **kwargs): # noqa: ARG002 serializer = self.get_serializer(data=request.data) @@ -200,18 +247,6 @@ def create(self, request, *args, **kwargs): # noqa: ARG002 data=ClusterSerializer(cluster, context=self.get_serializer_context()).data, status=HTTP_201_CREATED ) - @extend_schema( - operation_id="patchCluster", - summary="PATCH cluster", - description="Change cluster name.", - responses={ - 200: ClusterSerializer, - 400: ErrorSerializer, - 403: ErrorSerializer, - 404: ErrorSerializer, - 409: ErrorSerializer, - }, - ) @( audit_update(name="Cluster updated", 
object_=cluster_from_lookup) .attach_hooks(on_collect=only_on_success(update_cluster_name)) @@ -240,12 +275,6 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 status=HTTP_200_OK, data=ClusterSerializer(instance, context=self.get_serializer_context()).data ) - @extend_schema( - operation_id="deleteCluster", - summary="DELETE cluster", - description="Delete a specific ADCM cluster.", - responses={204: None, 403: ErrorSerializer, 404: ErrorSerializer}, - ) @audit_delete(name="Cluster deleted", object_=cluster_from_lookup, removed_on_success=True) def destroy(self, request, *args, **kwargs): # noqa: ARG002 cluster = self.get_object() @@ -253,12 +282,6 @@ def destroy(self, request, *args, **kwargs): # noqa: ARG002 return Response(status=HTTP_204_NO_CONTENT) - @extend_schema( - operation_id="getServicePrototypes", - summary="GET service prototypes", - description="Get service prototypes that is related to this cluster.", - responses={200: ServicePrototypeSerializer(many=True), 404: ErrorSerializer}, - ) @action(methods=["get"], detail=True, url_path="service-prototypes", pagination_class=None) def service_prototypes(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 cluster = self.get_object() @@ -269,12 +292,6 @@ def service_prototypes(self, request: Request, *args, **kwargs) -> Response: # return Response(data=serializer.data) - @extend_schema( - operation_id="getServiceCandidates", - summary="GET service candidates", - description="Get service prototypes that can be added to this cluster.", - responses={200: ServicePrototypeSerializer(many=True), 404: ErrorSerializer}, - ) @action(methods=["get"], detail=True, url_path="service-candidates", pagination_class=None) def service_candidates(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 cluster = self.get_object() @@ -330,7 +347,7 @@ def hosts_statuses(self, request: Request, *args, **kwargs) -> Response: # noqa operation_id="getHostComponentMapping", 
summary="GET host component mapping", description="Get information about host and component mapping.", - responses={200: MappingSerializer(many=True), 403: ErrorSerializer, 404: ErrorSerializer}, + responses=responses(success=MappingSerializer(many=True), errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), ) @extend_schema( methods=["post"], @@ -338,13 +355,10 @@ def hosts_statuses(self, request: Request, *args, **kwargs) -> Response: # noqa summary="POST host component mapping", description="Save host and component mapping information.", request=SetMappingSerializer(many=True), - responses={ - 201: MappingSerializer(many=True), - 400: ErrorSerializer, - 403: ErrorSerializer, - 404: ErrorSerializer, - 409: ErrorSerializer, - }, + responses=responses( + success=(HTTP_201_CREATED, MappingSerializer(many=True)), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), ) @audit_update(name="Host-Component map updated", object_=cluster_from_lookup) @action( @@ -384,12 +398,6 @@ def mapping(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG00 return Response(data=self.get_serializer(instance=new_mapping, many=True).data, status=HTTP_201_CREATED) - @extend_schema( - operation_id="getMappingHosts", - summary="GET mapping hosts", - description="Get a list of hosts to map.", - responses={200: HostMappingSerializer(many=True), 404: ErrorSerializer}, - ) @action( methods=["get"], pagination_class=None, @@ -404,12 +412,6 @@ def mapping_hosts(self, request: Request, *args, **kwargs) -> Response: # noqa: return Response(status=HTTP_200_OK, data=serializer.data) - @extend_schema( - operation_id="getMappingComponents", - summary="GET mapping components", - description="Get a list of components to map.", - responses={200: ComponentMappingSerializer, 404: ErrorSerializer}, - ) @action( methods=["get"], detail=True, @@ -431,11 +433,7 @@ def mapping_components(self, request: Request, *args, **kwargs): # noqa: ARG002 
operation_id="getClusterAnsibleConfigs", summary="GET cluster ansible configuration", description="Get information about cluster ansible config.", - responses={ - HTTP_200_OK: AnsibleConfigRetrieveSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - }, + responses=responses(success=AnsibleConfigRetrieveSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), ) @extend_schema( methods=["post"], @@ -443,13 +441,10 @@ def mapping_components(self, request: Request, *args, **kwargs): # noqa: ARG002 summary="POST cluster ansible config", description="Create ansible configuration.", request=AnsibleConfigUpdateSerializer, - responses={ - HTTP_201_CREATED: AnsibleConfigRetrieveSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, + responses=responses( + success=(HTTP_201_CREATED, AnsibleConfigRetrieveSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), ) @audit_update(name="Ansible configuration updated", object_=cluster_from_lookup) @action(methods=["get", "post"], detail=True, pagination_class=None, filter_backends=[], url_path="ansible-config") @@ -479,16 +474,6 @@ def ansible_config(self, request: Request, *args, **kwargs): # noqa: ARG002 return Response(status=HTTP_201_CREATED, data=AnsibleConfigRetrieveSerializer(instance=ansible_config).data) - @extend_schema( - methods=["get"], - operation_id="getClusterAnsibleConfigs", - summary="GET cluster ansible configuration", - description="Get information about cluster ansible config.", - responses={ - HTTP_200_OK: dict, - HTTP_404_NOT_FOUND: ErrorSerializer, - }, - ) @action(methods=["get"], detail=True, pagination_class=None, filter_backends=[], url_path="ansible-config-schema") def ansible_config_schema(self, request: Request, *args, **kwargs): # noqa: ARG002 adcm_meta_part = { @@ -537,3 +522,206 @@ def 
ansible_config_schema(self, request: Request, *args, **kwargs): # noqa: ARG } return Response(status=HTTP_200_OK, data=schema) + + +@extend_schema_view( + list=extend_schema( + operation_id="getClusterHosts", + description="Get a list of all cluster hosts.", + summary="GET cluster hosts", + parameters=[ + OpenApiParameter(name="name", description="Case insensitive and partial filter by host name."), + OpenApiParameter(name="componentId", description="Id of component."), + DefaultParams.LIMIT, + DefaultParams.OFFSET, + DefaultParams.ordering_by("name", "id", default="name"), + OpenApiParameter(name="search", exclude=True), + ], + responses=responses(success=HostSerializer, errors=HTTP_404_NOT_FOUND), + ), + create=extend_schema( + operation_id="postCusterHosts", + description="Add a new hosts to cluster.", + summary="POST cluster hosts", + request=HostAddSerializer(many=True), + responses=responses( + success=(HTTP_201_CREATED, HostSerializer(many=True)), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + retrieve=extend_schema( + operation_id="getClusterHost", + description="Get information about a specific cluster host.", + summary="GET cluster host", + responses=responses(success=HostSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + ), + destroy=extend_schema( + operation_id="deleteClusterHost", + description="Unlink host from cluster.", + summary="DELETE cluster host", + responses=responses( + success=(HTTP_204_NO_CONTENT, None), errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT) + ), + ), + maintenance_mode=extend_schema( + operation_id="postClusterHostMaintenanceMode", + description="Turn on/off maintenance mode on the cluster host.", + summary="POST cluster host maintenance-mode", + responses=responses( + success=HostChangeMaintenanceModeSerializer, + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + statuses=extend_schema( + 
operation_id="getHostStatuses", + description="Get information about cluster host status.", + summary="GET host status", + responses=responses(success=ClusterHostStatusSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + ), +) +class HostClusterViewSet( + PermissionListMixin, ObjectWithStatusViewMixin, RetrieveModelMixin, ListModelMixin, ADCMGenericViewSet +): + permission_required = [VIEW_HOST_PERM] + permission_classes = [HostsClusterPermissions] + # have to define it here for `ObjectWithStatusViewMixin` to be able to determine model related to view + # don't use it directly, use `get_queryset` + queryset = ( + Host.objects.select_related("cluster", "cluster__prototype", "provider", "prototype") + .prefetch_related("concerns", "hostcomponent_set__component__prototype") + .order_by("fqdn") + ) + filterset_class = HostMemberFilter + audit_model_hint = Host + retrieve_status_map_actions = ("list", "statuses") + exc_conversion_map = { + HostDoesNotExistError: AdcmEx("BAD_REQUEST", "At least one host does not exist."), + HostAlreadyBoundError: AdcmEx("HOST_CONFLICT", "At least one host is already associated with this cluster."), + HostBelongsToAnotherClusterError: AdcmEx( + "FOREIGN_HOST", "At least one host is already linked to another cluster." 
+ ), + } + + def get_serializer_class(self): + if self.action == "maintenance_mode": + return HostChangeMaintenanceModeSerializer + + if self.action == "create": + return HostAddSerializer + + return HostSerializer + + def get_queryset(self, *_, **__): + cluster = get_object_for_user( + user=self.request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=self.kwargs["cluster_pk"] + ) + + by_cluster_qs = ( + get_objects_for_user(**self.get_get_objects_for_user_kwargs(self.queryset)) + .filter(cluster=cluster) + .order_by("fqdn") + ) + + if self.action == "statuses": + return by_cluster_qs.prefetch_related("hostcomponent_set__component__prototype") + + return by_cluster_qs + + def handle_exception(self, exc: Any): + return super().handle_exception(self.exc_conversion_map.get(exc.__class__, exc)) + + @audit_update(name="Hosts added", object_=parent_cluster_from_lookup).attach_hooks(pre_call=set_add_hosts_name) + def create(self, request, *_, **kwargs): + cluster = get_object_for_user( + user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=kwargs["cluster_pk"] + ) + + check_custom_perm(request.user, "map_host_to", "cluster", cluster) + + multiple_hosts = isinstance(request.data, list) + + serializer = self.get_serializer(data=request.data, many=multiple_hosts) + serializer.is_valid(raise_exception=True) + + added_hosts: Collection[int] = perform_host_to_cluster_map( + cluster_id=cluster.pk, + hosts=[ + entry["host_id"] + for entry in (serializer.validated_data if multiple_hosts else [serializer.validated_data]) + ], + status_service=notify, + ) + + qs_for_added_hosts = self.get_queryset().filter(id__in=added_hosts) + return Response( + status=HTTP_201_CREATED, + data=HostSerializer( + instance=qs_for_added_hosts if multiple_hosts else qs_for_added_hosts.first(), + many=multiple_hosts, + context=self.get_serializer_context(), + ).data, + ) + + @( + audit_update(name="Host removed", object_=parent_cluster_from_lookup).attach_hooks( + 
pre_call=set_removed_host_name, on_collect=adjust_denied_on_404_result(objects_exist=nested_host_does_exist) + ) + ) + def destroy(self, request, *args, **kwargs): # noqa: ARG002 + host = self.get_object() + cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) + check_custom_perm(request.user, "unmap_host_from", "cluster", cluster) + remove_host_from_cluster(host=host) + return Response(status=HTTP_204_NO_CONTENT) + + @audit_update(name="Host updated", object_=host_from_lookup).track_changes( + before=extract_previous_from_object(Host, "maintenance_mode"), + after=extract_current_from_response("maintenance_mode"), + ) + @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) + def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 + return maintenance_mode(request=request, host=self.get_object()) + + @action(methods=["get"], detail=True, url_path="statuses") + def statuses(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 + host = self.get_object() + cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) + if host.cluster != cluster: + raise AdcmEx(code="FOREIGN_HOST", msg=f"Host #{host.id} doesn't belong to cluster #{cluster.id}") + + return Response( + data=ClusterHostStatusSerializer( + instance=Host.objects.prefetch_related("hostcomponent_set__component__prototype").get(id=host.id), + context=self.get_serializer_context(), + ).data + ) + + +@extend_schema_view( + list=extend_schema( + operation_id="getClusterImports", + description="Get information about cluster imports.", + summary="GET cluster imports", + parameters=[DefaultParams.LIMIT, DefaultParams.OFFSET], + responses=responses(success=ImportSerializer(many=True), errors=HTTP_403_FORBIDDEN), + ), + create=extend_schema( + operation_id="postClusterImports", + description="Import data.", + summary="POST cluster 
imports", + responses=responses( + success=(HTTP_201_CREATED, ImportPostSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), +) +@audit_view(create=audit_update(name="Cluster import updated", object_=parent_cluster_from_lookup)) +class ClusterImportViewSet(ImportViewSet): + def detect_get_check_kwargs(self) -> tuple[dict, dict]: + return ( + {"perms": VIEW_CLUSTER_PERM, "klass": Cluster, "id": self.kwargs["cluster_pk"]}, + {"action_type": VIEW_IMPORT_PERM, "model": Cluster.__name__.lower()}, + ) + + def detect_cluster_service_bind_arguments(self, obj: Cluster) -> tuple[Cluster, None]: + return obj, None diff --git a/python/api_v2/imports/__init__.py b/python/api_v2/generic/__init__.py similarity index 100% rename from python/api_v2/imports/__init__.py rename to python/api_v2/generic/__init__.py diff --git a/python/api_v2/generic/imports/__init__.py b/python/api_v2/generic/imports/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/api_v2/generic/imports/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/python/api_v2/imports/serializers.py b/python/api_v2/generic/imports/serializers.py similarity index 100% rename from python/api_v2/imports/serializers.py rename to python/api_v2/generic/imports/serializers.py diff --git a/python/api_v2/imports/types.py b/python/api_v2/generic/imports/types.py similarity index 100% rename from python/api_v2/imports/types.py rename to python/api_v2/generic/imports/types.py diff --git a/python/api_v2/imports/utils.py b/python/api_v2/generic/imports/utils.py similarity index 99% rename from python/api_v2/imports/utils.py rename to python/api_v2/generic/imports/utils.py index 3057462ad0..89867858cc 100644 --- a/python/api_v2/imports/utils.py +++ b/python/api_v2/generic/imports/utils.py @@ -25,7 +25,7 @@ from cm.services.status.convert import convert_to_entity_status from django.db.models import QuerySet -from api_v2.imports.types import ( +from api_v2.generic.imports.types import ( ClusterImportCandidate, CommonImportCandidate, ServiceImportCandidate, diff --git a/python/api_v2/generic/imports/views.py b/python/api_v2/generic/imports/views.py new file mode 100644 index 0000000000..2996a470fb --- /dev/null +++ b/python/api_v2/generic/imports/views.py @@ -0,0 +1,84 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import ABC, abstractmethod + +from adcm.permissions import ( + CHANGE_IMPORT_PERM, + VIEW_CLUSTER_BIND, + check_custom_perm, + get_object_for_user, +) +from cm.api import multi_bind +from cm.models import Cluster, ClusterObject, PrototypeImport +from django.db.transaction import atomic +from rest_framework.request import Request +from rest_framework.response import Response +from rest_framework.status import ( + HTTP_201_CREATED, +) + +from api_v2.generic.imports.serializers import ImportPostSerializer +from api_v2.generic.imports.utils import cook_data_for_multibind, get_imports +from api_v2.views import ADCMGenericViewSet + + +class ImportViewSet(ADCMGenericViewSet, ABC): + queryset = PrototypeImport.objects.all() + ordering = ["id"] + filter_backends = [] + serializer_class = ImportPostSerializer + + @abstractmethod + def detect_get_check_kwargs(self) -> tuple[dict, dict]: + ... + + @abstractmethod + def detect_cluster_service_bind_arguments( + self, obj: Cluster | ClusterObject + ) -> tuple[Cluster, ClusterObject | None]: + ... 
+ + def get_object_and_check_perm(self, request) -> Cluster | ClusterObject: + kwargs_get, kwargs_check = self.detect_get_check_kwargs() + + if self.action == "list": + kwargs_check.update({"second_perm": VIEW_CLUSTER_BIND}) + + obj = get_object_for_user(user=request.user, **kwargs_get) + + check_custom_perm(user=request.user, obj=obj, **kwargs_check) + + if self.action == "create": + check_custom_perm( + user=request.user, action_type=CHANGE_IMPORT_PERM, model=obj.__class__.__name__.lower(), obj=obj + ) + + return obj + + def list(self, request: Request, *_, **__) -> Response: + obj = self.get_object_and_check_perm(request=request) + return self.get_paginated_response(data=self.paginate_queryset(queryset=get_imports(obj=obj))) + + @atomic + def create(self, request, *_, **__): + obj = self.get_object_and_check_perm(request=request) + serializer = self.get_serializer(data=request.data, many=True, context={"request": request, "cluster": obj}) + serializer.is_valid(raise_exception=True) + + cluster, service = self.detect_cluster_service_bind_arguments(obj) + multi_bind( + cluster=cluster, + service=service, + bind_list=cook_data_for_multibind(validated_data=serializer.validated_data, obj=obj), + ) + return Response(get_imports(obj=obj), status=HTTP_201_CREATED) diff --git a/python/api_v2/host/filters.py b/python/api_v2/host/filters.py index d91a78c6d4..c6b88e6764 100644 --- a/python/api_v2/host/filters.py +++ b/python/api_v2/host/filters.py @@ -11,13 +11,7 @@ # limitations under the License. 
from cm.models import Host -from django_filters.rest_framework import ( - BooleanFilter, - CharFilter, - FilterSet, - NumberFilter, - OrderingFilter, -) +from django_filters.rest_framework import BooleanFilter, CharFilter, FilterSet, NumberFilter, OrderingFilter class HostFilter(FilterSet): @@ -38,7 +32,7 @@ def filter_is_in_cluster(queryset, _, value): return queryset.filter(cluster__isnull=not value) -class HostClusterFilter(FilterSet): +class HostMemberFilter(FilterSet): name = CharFilter(label="Host name", field_name="fqdn", lookup_expr="icontains") hostprovider_name = CharFilter(label="Hostprovider name", field_name="provider__name") component_id = NumberFilter(label="Component id", field_name="hostcomponent__component_id") diff --git a/python/api_v2/host/permissions.py b/python/api_v2/host/permissions.py index da85b88a4b..3ec44fba74 100644 --- a/python/api_v2/host/permissions.py +++ b/python/api_v2/host/permissions.py @@ -50,21 +50,3 @@ def has_permission(self, request, view) -> bool: return True return super().has_permission(request=request, view=view) - - -class HostsClusterPermissions(DjangoObjectPermissions): - perms_map = { - "GET": [], - "OPTIONS": [], - "HEAD": [], - "POST": [], - "PUT": ["%(app_label)s.change_%(model_name)s"], - "PATCH": ["%(app_label)s.change_%(model_name)s"], - "DELETE": ["%(app_label)s.delete_%(model_name)s"], - } - - def has_permission(self, request, view) -> bool: - if view.action in ["create", "destroy"]: - return True - - return super().has_permission(request=request, view=view) diff --git a/python/api_v2/host/serializers.py b/python/api_v2/host/serializers.py index 69a358d09c..6a6ac1eb35 100644 --- a/python/api_v2/host/serializers.py +++ b/python/api_v2/host/serializers.py @@ -12,7 +12,7 @@ from adcm import settings from adcm.serializers import EmptySerializer -from cm.models import Cluster, Host, HostComponent, HostProvider, MaintenanceMode, ServiceComponent +from cm.models import Cluster, Host, HostProvider, MaintenanceMode, 
ServiceComponent from cm.validators import HostUniqueValidator, StartMidEndValidator from drf_spectacular.utils import extend_schema_field from rest_framework.serializers import ( @@ -177,24 +177,6 @@ class Meta: extra_kwargs = {"name": {"read_only": True}} -class RelatedHostComponentsStatusSerializer(WithStatusSerializer): - id = IntegerField(source="component.id") - name = CharField(source="component.name") - display_name = CharField(source="component.display_name") - - class Meta: - model = HostComponent - fields = ["id", "name", "display_name", "status"] - - -class ClusterHostStatusSerializer(EmptySerializer): - host_components = RelatedHostComponentsStatusSerializer(many=True, source="hostcomponent_set") - - class Meta: - model = Host - fields = ["host_components"] - - class HostAuditSerializer(ModelSerializer): class Meta: model = Host diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index 4200376041..a2ba6f0a3b 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Collection from adcm.mixins import GetParentObjectMixin from adcm.permissions import ( @@ -22,24 +21,14 @@ check_custom_perm, get_object_for_user, ) -from audit.alt.api import audit_update -from audit.alt.hooks import adjust_denied_on_404_result, extract_current_from_response, extract_previous_from_object from audit.utils import audit -from cm.api import delete_host, remove_host_from_cluster +from cm.api import delete_host from cm.errors import AdcmEx from cm.models import Cluster, ConcernType, GroupConfig, Host, HostProvider -from cm.services.cluster import perform_host_to_cluster_map -from cm.services.status import notify -from core.cluster.errors import ( - HostAlreadyBoundError, - HostBelongsToAnotherClusterError, - HostDoesNotExistError, -) from django.db.transaction import atomic from django_filters.rest_framework.backends import DjangoFilterBackend from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin -from guardian.shortcuts import get_objects_for_user from rest_framework.decorators import action from rest_framework.exceptions import NotFound from rest_framework.mixins import ListModelMixin, RetrieveModelMixin @@ -57,14 +46,12 @@ from api_v2.api_schema import DefaultParams, ErrorSerializer from api_v2.config.utils import ConfigSchemaMixin -from api_v2.host.filters import HostClusterFilter, HostFilter +from api_v2.host.filters import HostFilter, HostMemberFilter from api_v2.host.permissions import ( GroupConfigHostsPermissions, - HostsClusterPermissions, HostsPermissions, ) from api_v2.host.serializers import ( - ClusterHostStatusSerializer, HostAddSerializer, HostChangeMaintenanceModeSerializer, HostCreateSerializer, @@ -73,13 +60,6 @@ HostUpdateSerializer, ) from api_v2.host.utils import create_host, maintenance_mode, process_config_issues_policies_hc -from api_v2.utils.audit import ( - host_from_lookup, - nested_host_does_exist, - 
parent_cluster_from_lookup, - set_add_hosts_name, - set_removed_host_name, -) from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -282,241 +262,6 @@ def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # no return maintenance_mode(request=request, host=self.get_object()) -@extend_schema_view( - list=extend_schema( - operation_id="getClusterHosts", - description="Get a list of all cluster hosts.", - summary="GET cluster hosts", - parameters=[ - OpenApiParameter(name="name", description="Case insensitive and partial filter by host name."), - OpenApiParameter(name="componentId", description="Id of component."), - DefaultParams.LIMIT, - DefaultParams.OFFSET, - OpenApiParameter( - name="ordering", - description='Field to sort by. To sort in descending order, precede the attribute name with a "-".', - type=str, - enum=("name", "-name", "id", "-id"), - default="name", - ), - OpenApiParameter(name="search", exclude=True), - ], - responses={ - HTTP_200_OK: HostSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - }, - ), - create=extend_schema( - operation_id="postCusterHosts", - description="Add a new hosts to cluster.", - summary="POST cluster hosts", - request=HostAddSerializer(many=True), - responses={ - HTTP_201_CREATED: HostSerializer(many=True), - **{ - err_code: ErrorSerializer - for err_code in (HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT) - }, - }, - ), - retrieve=extend_schema( - operation_id="getClusterHost", - description="Get information about a specific cluster host.", - summary="GET cluster host", - parameters=[ - OpenApiParameter( - name="id", - type=int, - location=OpenApiParameter.PATH, - description="Host id.", - ), - ], - responses={ - HTTP_200_OK: HostSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - }, - ), - destroy=extend_schema( - operation_id="deleteClusterHost", - description="Unlink host from cluster.", - summary="DELETE 
cluster host", - parameters=[ - OpenApiParameter( - name="id", - type=int, - location=OpenApiParameter.PATH, - description="Host id.", - ), - ], - responses={ - HTTP_204_NO_CONTENT: None, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, - ), - maintenance_mode=extend_schema( - operation_id="postClusterHostMaintenanceMode", - description="Turn on/off maintenance mode on the cluster host.", - summary="POST cluster host maintenance-mode", - parameters=[ - OpenApiParameter( - name="id", - type=int, - location=OpenApiParameter.PATH, - description="Host id.", - ), - ], - responses={ - HTTP_200_OK: HostChangeMaintenanceModeSerializer, - **{ - err_code: ErrorSerializer - for err_code in (HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT) - }, - }, - ), - statuses=extend_schema( - operation_id="getHostStatuses", - description="Get information about cluster host status.", - summary="GET host status", - parameters=[ - OpenApiParameter( - name="id", - type=int, - location=OpenApiParameter.PATH, - description="Host id.", - ), - ], - responses={ - HTTP_200_OK: ClusterHostStatusSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - }, - ), -) -class HostClusterViewSet( - PermissionListMixin, RetrieveModelMixin, ListModelMixin, ObjectWithStatusViewMixin, ADCMGenericViewSet -): - permission_required = [VIEW_HOST_PERM] - permission_classes = [HostsClusterPermissions] - # have to define it here for `ObjectWithStatusViewMixin` to be able to determine model related to view - # don't use it directly, use `get_queryset` - queryset = ( - Host.objects.select_related("cluster", "cluster__prototype", "provider", "prototype") - .prefetch_related("concerns", "hostcomponent_set__component__prototype") - .order_by("fqdn") - ) - filterset_class = HostClusterFilter - audit_model_hint = Host - retrieve_status_map_actions = ("list", "statuses") - - def 
get_serializer_class(self): - if self.action == "maintenance_mode": - return HostChangeMaintenanceModeSerializer - - if self.action == "create": - return HostAddSerializer - - return HostSerializer - - def get_queryset(self, *args, **kwargs): # noqa: ARG002 - cluster = get_object_for_user( - user=self.request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=self.kwargs["cluster_pk"] - ) - - by_cluster_qs = ( - get_objects_for_user(**self.get_get_objects_for_user_kwargs(self.queryset)) - .filter(cluster=cluster) - .order_by("fqdn") - ) - - if self.action == "statuses": - return by_cluster_qs.prefetch_related("hostcomponent_set__component__prototype") - - return by_cluster_qs - - @audit_update(name="Hosts added", object_=parent_cluster_from_lookup).attach_hooks(pre_call=set_add_hosts_name) - def create(self, request, *_, **kwargs): - cluster = get_object_for_user( - user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, id=kwargs["cluster_pk"] - ) - if not cluster: - return Response(data=f'Cluster with pk "{kwargs["cluster_pk"]}" not found', status=HTTP_404_NOT_FOUND) - - check_custom_perm(request.user, "map_host_to", "cluster", cluster) - - multiple_hosts = isinstance(request.data, list) - - serializer = self.get_serializer(data=request.data, many=multiple_hosts) - serializer.is_valid(raise_exception=True) - - try: - added_hosts: Collection[int] = perform_host_to_cluster_map( - cluster_id=cluster.pk, - hosts=[ - entry["host_id"] - for entry in (serializer.validated_data if multiple_hosts else [serializer.validated_data]) - ], - status_service=notify, - ) - except HostDoesNotExistError: - raise AdcmEx("BAD_REQUEST", "At least one host does not exist.") from None - except HostAlreadyBoundError: - raise AdcmEx("HOST_CONFLICT", "At least one host is already associated with this cluster.") from None - except HostBelongsToAnotherClusterError: - raise AdcmEx("FOREIGN_HOST", "At least one host is already linked to another cluster.") from None - - qs_for_added_hosts = 
self.get_queryset().filter(id__in=added_hosts) - - context = self.get_serializer_context() - - if multiple_hosts: - return Response( - status=HTTP_201_CREATED, - data=HostSerializer(instance=qs_for_added_hosts, many=True, context=context).data, - ) - - return Response( - status=HTTP_201_CREATED, - data=HostSerializer(instance=qs_for_added_hosts.first(), context=context).data, - ) - - @( - audit_update(name="Host removed", object_=parent_cluster_from_lookup).attach_hooks( - pre_call=set_removed_host_name, on_collect=adjust_denied_on_404_result(objects_exist=nested_host_does_exist) - ) - ) - def destroy(self, request, *args, **kwargs): # noqa: ARG002 - host = self.get_object() - cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) - check_custom_perm(request.user, "unmap_host_from", "cluster", cluster) - remove_host_from_cluster(host=host) - return Response(status=HTTP_204_NO_CONTENT) - - @audit_update(name="Host updated", object_=host_from_lookup).track_changes( - before=extract_previous_from_object(Host, "maintenance_mode"), - after=extract_current_from_response("maintenance_mode"), - ) - @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) - def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 - return maintenance_mode(request=request, host=self.get_object()) - - @action(methods=["get"], detail=True, url_path="statuses") - def statuses(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 - host = self.get_object() - cluster = get_object_for_user(request.user, VIEW_CLUSTER_PERM, Cluster, id=kwargs["cluster_pk"]) - if host.cluster != cluster: - raise AdcmEx(code="FOREIGN_HOST", msg=f"Host #{host.id} doesn't belong to cluster #{cluster.id}") - - return Response( - data=ClusterHostStatusSerializer( - instance=Host.objects.prefetch_related("hostcomponent_set__component__prototype").get(id=host.id), - 
context=self.get_serializer_context(), - ).data - ) - - @extend_schema_view( list=extend_schema( operation_id="getObjectConfigGroupHosts", @@ -559,7 +304,7 @@ class HostGroupConfigViewSet( ) permission_classes = [GroupConfigHostsPermissions] permission_required = [VIEW_HOST_PERM] - filterset_class = HostClusterFilter + filterset_class = HostMemberFilter filter_backends = (DjangoFilterBackend,) pagination_class = None diff --git a/python/api_v2/imports/views.py b/python/api_v2/imports/views.py index bcf5678ff5..4ba4470307 100644 --- a/python/api_v2/imports/views.py +++ b/python/api_v2/imports/views.py @@ -19,6 +19,10 @@ check_custom_perm, get_object_for_user, ) +from api_v2.api_schema import DefaultParams, ErrorSerializer +from api_v2.imports.serializers import ImportPostSerializer, ImportSerializer +from api_v2.imports.utils import cook_data_for_multibind, get_imports +from api_v2.views import ADCMGenericViewSet from audit.utils import audit from cm.api import multi_bind from cm.models import Cluster, ClusterObject, PrototypeImport @@ -40,11 +44,6 @@ HTTP_409_CONFLICT, ) -from api_v2.api_schema import DefaultParams, ErrorSerializer -from api_v2.imports.serializers import ImportPostSerializer, ImportSerializer -from api_v2.imports.utils import cook_data_for_multibind, get_imports -from api_v2.views import ADCMGenericViewSet - class ImportViewSet(ListModelMixin, CreateModelMixin, ADCMGenericViewSet): queryset = PrototypeImport.objects.all() diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index f8118568fc..ab971b2f5c 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -14,14 +14,16 @@ ADD_SERVICE_PERM, CHANGE_MM_PERM, VIEW_CLUSTER_PERM, + VIEW_IMPORT_PERM, VIEW_SERVICE_PERM, ChangeMMPermissions, check_custom_perm, get_object_for_user, ) +from audit.alt.api import audit_update, audit_view from audit.utils import audit from cm.errors import AdcmEx -from cm.models import ADCMEntityStatus, Cluster, ClusterObject 
+from cm.models import Cluster, ClusterObject from cm.services.maintenance_mode import get_maintenance_mode_response from cm.services.service import delete_service_from_api from cm.services.status.notify import update_mm_objects @@ -47,8 +49,10 @@ HTTP_409_CONFLICT, ) -from api_v2.api_schema import DefaultParams, ErrorSerializer +from api_v2.api_schema import DefaultParams, responses from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer +from api_v2.generic.imports.views import ImportViewSet from api_v2.service.filters import ServiceFilter from api_v2.service.permissions import ServicePermissions from api_v2.service.serializers import ( @@ -61,6 +65,7 @@ bulk_add_services_to_cluster, validate_service_prototypes, ) +from api_v2.utils.audit import parent_service_from_lookup from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -69,11 +74,7 @@ operation_id="getClusterService", summary="GET cluster service", description="Get information about a specific cluster service.", - responses={ - HTTP_200_OK: ServiceRetrieveSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - }, + responses=responses(success=ServiceRetrieveSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), ), list=extend_schema( operation_id="getClusterServices", @@ -83,6 +84,7 @@ DefaultParams.LIMIT, DefaultParams.OFFSET, DefaultParams.ordering_by("Display name"), + DefaultParams.STATUS_OPTIONAL, OpenApiParameter( name="name", location=OpenApiParameter.QUERY, @@ -95,84 +97,42 @@ description="Case insensitive and partial filter by service displayName.", type=str, ), - OpenApiParameter( - name="status", - location=OpenApiParameter.QUERY, - description="Filter by service status.", - enum=ADCMEntityStatus.values, - type=str, - ), ], - responses={HTTP_200_OK: ServiceRetrieveSerializer(many=True), HTTP_404_NOT_FOUND: ErrorSerializer}, + 
responses=responses(success=ServiceRetrieveSerializer(many=True), errors=HTTP_404_NOT_FOUND), ), create=extend_schema( operation_id="postClusterServices", summary="POST cluster services", description="Add a new cluster services.", - responses={ - HTTP_201_CREATED: ServiceRetrieveSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, + responses=responses( + success=(HTTP_201_CREATED, ServiceRetrieveSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND, HTTP_403_FORBIDDEN, HTTP_409_CONFLICT), + ), ), destroy=extend_schema( operation_id="deleteClusterService", summary="DELETE cluster service", description="Delete a specific cluster service.", - responses={ - HTTP_204_NO_CONTENT: None, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, + responses=responses( + success=(HTTP_204_NO_CONTENT, None), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), ), maintenance_mode=extend_schema( operation_id="postServiceMaintenanceMode", summary="POST service maintenance-mode", description="Turn on/off maintenance mode on the service.", - responses={ - HTTP_200_OK: ServiceMaintenanceModeSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, + responses=responses( + success=ServiceMaintenanceModeSerializer, + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), ), statuses=extend_schema( operation_id="getServiceComponentStatuses", summary="GET service component statuses", description="Get information about service component statuses.", - responses={ - HTTP_200_OK: ServiceStatusSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - 
HTTP_404_NOT_FOUND: ErrorSerializer, - }, - parameters=[ - OpenApiParameter( - name="status", - required=True, - location=OpenApiParameter.QUERY, - description="Case insensitive and partial filter by status.", - type=str, - ), - OpenApiParameter( - name="clusterId", - required=True, - location=OpenApiParameter.PATH, - description="Cluster id.", - type=int, - ), - OpenApiParameter( - name="serviceId", - required=True, - location=OpenApiParameter.PATH, - description="Service id.", - type=int, - ), - ], + responses=responses(success=ServiceStatusSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + parameters=[DefaultParams.STATUS_REQUIRED], ), ) class ServiceViewSet( @@ -274,3 +234,33 @@ def statuses(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG0 service = get_object_for_user(user=request.user, perms=VIEW_SERVICE_PERM, klass=ClusterObject, id=kwargs["pk"]) return Response(data=ServiceStatusSerializer(instance=service, context=self.get_serializer_context()).data) + + +@extend_schema_view( + list=extend_schema( + operation_id="getServiceImports", + description="Get information about service imports.", + summary="GET service imports", + parameters=[DefaultParams.LIMIT, DefaultParams.OFFSET], + responses=responses(success=ImportSerializer(many=True), errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + ), + create=extend_schema( + operation_id="postServiceImports", + description="Import data.", + summary="POST service imports", + responses=responses( + success=(HTTP_201_CREATED, ImportPostSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), +) +@audit_view(create=audit_update(name="Service import updated", object_=parent_service_from_lookup)) +class ServiceImportViewSet(ImportViewSet): + def detect_get_check_kwargs(self) -> tuple[dict, dict]: + return ( + {"perms": VIEW_SERVICE_PERM, "klass": ClusterObject, "id": self.kwargs["service_pk"]}, + {"action_type": VIEW_IMPORT_PERM, 
"model": ClusterObject.__name__.lower()}, + ) + + def detect_cluster_service_bind_arguments(self, obj: Cluster | ClusterObject) -> tuple[Cluster, ClusterObject]: + return obj.cluster, obj diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 147d5d6a16..5837d85193 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -19,7 +19,7 @@ from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType -from cm.models import Cluster, Host +from cm.models import Cluster, ClusterObject, Host from django.db.models import Model from rest_framework.response import Response @@ -56,6 +56,19 @@ def get_name(self, id_: str | int) -> str | None: return self.cm_model.objects.values_list(self.name_field, flat=True).filter(id=id_).first() +@dataclass(slots=True) +class ServiceAuditObjectCreator(CMAuditObjectCreator): + cm_model = ClusterObject + name_field = "prototype__display_name" + + def get_name(self, id_: str | int) -> str | None: + names = ClusterObject.objects.values_list("cluster__name", "prototype__display_name").filter(id=id_).first() + if not names: + return None + + return "/".join(names) + + create_audit_cluster_object = CMAuditObjectCreator(cm_model=Cluster) create_audit_host_object = CMAuditObjectCreator(cm_model=Host, name_field="fqdn") @@ -66,6 +79,13 @@ def get_name(self, id_: str | int) -> str | None: cluster_from_lookup = _extract_cluster_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) parent_cluster_from_lookup = _extract_cluster_from(extract_id=ExtractID(field="cluster_pk").from_lookup_kwargs) +_extract_service_from = partial( + GeneralAuditObjectRetriever, + audit_object_type=AuditObjectType.SERVICE, + create_new=ServiceAuditObjectCreator(cm_model=ClusterObject), +) +parent_service_from_lookup = _extract_service_from(extract_id=ExtractID(field="service_pk").from_lookup_kwargs) + host_from_lookup = 
GeneralAuditObjectRetriever( audit_object_type=AuditObjectType.HOST, extract_id=ExtractID(field="pk").from_lookup_kwargs, diff --git a/python/audit/alt/api.py b/python/audit/alt/api.py index 96e6270ad7..451b22ece7 100644 --- a/python/audit/alt/api.py +++ b/python/audit/alt/api.py @@ -13,7 +13,7 @@ from abc import ABC from dataclasses import dataclass from functools import lru_cache -from typing import Any, Callable, Iterable, ParamSpec +from typing import Callable, Iterable, ParamSpec, TypeVar from rest_framework.response import Response from typing_extensions import Self @@ -31,6 +31,7 @@ from audit.alt.object_retrievers import ignore_object_search from audit.models import AuditLogOperationType +T = TypeVar("T") P = ParamSpec("P") AUDITED_HTTP_METHODS = frozenset(("POST", "DELETE", "PUT", "PATCH")) @@ -51,9 +52,8 @@ class AuditedEndpointConfig: class AuditEndpointsRegistry: """ Registry of view functions that should be audited. - Used to match caller func and audit configuration in runtime (usually middleware). - Key extraction is bound to usages, so it may have to be adjusted/extended in the future. + Key format: {module_name}:{class_name}.{func_name} """ __slots__ = ("_endpoints",) @@ -61,15 +61,10 @@ class AuditEndpointsRegistry: def __init__(self): self._endpoints: dict[str, AuditedEndpointConfig] = {} - def register(self, func: Callable, config: AuditedEndpointConfig) -> None: - key = f"{getattr(func, '__module__', '-')}:{func.__qualname__}" + def register(self, key: str, config: AuditedEndpointConfig) -> None: self._endpoints[key] = config - def find_for_view(self, http_method: str, view_func: Any) -> AuditedEndpointConfig | None: - # view_func is not just simple Callable, it's special func prepared by Django's middleware system. 
- # __qualname__ of view_func doesn't specify method (because it's View, not API method itself) - method_name = getattr(view_func, "actions", {}).get(http_method.lower(), "") - key = f"{getattr(view_func, '__module__', '-')}:{view_func.__qualname__}.{method_name}".rstrip(".") + def find(self, key: str) -> AuditedEndpointConfig | None: return self._endpoints.get(key) @@ -78,11 +73,13 @@ def get_endpoints_registry() -> AuditEndpointsRegistry: return AuditEndpointsRegistry() -class GenericAPIAuditDecorator: +class GenericAPIAuditRegistrator: """ Decorator to wrap ViewSet's functions that should be audited. Adds function to registry and returns function without changes. Additional hooks may be configured after instantiation. + + Can be used directly via `register` """ def __init__(self, name: str, type_: AuditLogOperationType, object_: RetrieveAuditObjectFunc): @@ -95,19 +92,23 @@ def __init__(self, name: str, type_: AuditLogOperationType, object_: RetrieveAud self._registry = get_endpoints_registry() def __call__(self, func: Callable[P, Response]) -> Callable[P, Response]: - endpoint_config = AuditedEndpointConfig( + self.register(with_key=f"{getattr(func, '__module__', '-')}:{func.__qualname__}") + + return func + + def register(self, with_key: str) -> None: + self._registry.register(key=with_key, config=self._construct_audit_config()) + + def _construct_audit_config(self) -> AuditedEndpointConfig: + return AuditedEndpointConfig( operation_type=self.operation_type, operation_name=self.operation_name, retrieve_object_func=self.retrieve_object_func, hooks=Hooks(pre_call=tuple(self.extra_pre_call_hooks), on_collect=tuple(self.extra_on_collect_hooks)), ) - self._registry.register(func=func, config=endpoint_config) - - return func - -class TypedAuditDecorator(GenericAPIAuditDecorator, ABC): +class TypedAuditDecorator(GenericAPIAuditRegistrator, ABC): OPERATION_TYPE: AuditLogOperationType def __init__(self, name: str, object_: RetrieveAuditObjectFunc): @@ -159,3 +160,19 @@ 
def __init__(self, name: str, object_: RetrieveAuditObjectFunc, removed_on_succe self.extra_pre_call_hooks.extend(pre_hooks) self.extra_on_collect_hooks.extend(collect_hooks) + + +class audit_view: # noqa: N801 + def __init__(self, **audited_methods: GenericAPIAuditRegistrator): + self.view_methods_audit_registers = audited_methods + + def __call__(self, cls: T) -> T: + for method_name, registrar in self.view_methods_audit_registers.items(): + if not hasattr(cls, method_name): + message = f"Failed to audit method {method_name} of {cls}: method not found" + raise AttributeError(message) + + key = f"{cls.__module__}:{cls.__name__}.{method_name}" + registrar.register(with_key=key) + + return cls diff --git a/python/audit/alt/middleware.py b/python/audit/alt/middleware.py index 1b7fa79f66..98fa172cd8 100644 --- a/python/audit/alt/middleware.py +++ b/python/audit/alt/middleware.py @@ -53,9 +53,11 @@ def __call__(self, request): return response def process_view(self, request, view_func, view_args, view_kwargs): # noqa: ARG002 - endpoint_config: AuditedEndpointConfig | None = self.audited_endpoints_registry.find_for_view( - http_method=request.method, view_func=view_func - ) + # view_func is not just simple Callable, it's special func prepared by Django's middleware system. 
+ # __qualname__ of view_func doesn't specify method (because it's View, not API method itself) + method_name = getattr(view_func, "actions", {}).get(request.method.lower(), "") + key = f"{getattr(view_func, '__module__', '-')}:{view_func.__qualname__}.{method_name}".rstrip(".") + endpoint_config: AuditedEndpointConfig | None = self.audited_endpoints_registry.find(key=key) if not endpoint_config: return From e77a05fe8f90e3189ecd14799fd75e893f52f355 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Thu, 27 Jun 2024 10:27:13 +0000 Subject: [PATCH 03/98] ADCM-5704 Specialize Group Config --- python/api_v2/cluster/urls.py | 63 ++++---- python/api_v2/cluster/views.py | 15 ++ python/api_v2/component/views.py | 16 ++ .../{ => generic}/group_config/__init__.py | 0 .../api_v2/generic/group_config/api_schema.py | 102 ++++++++++++ python/api_v2/generic/group_config/audit.py | 134 ++++++++++++++++ .../{ => generic}/group_config/permissions.py | 20 ++- .../{ => generic}/group_config/serializers.py | 12 +- .../generic/group_config/urls_helpers.py | 43 ++++++ .../{ => generic}/group_config/views.py | 145 +++++++++++------- python/api_v2/host/permissions.py | 19 --- python/api_v2/host/serializers.py | 10 -- python/api_v2/host/views.py | 111 +------------- python/api_v2/hostprovider/urls.py | 41 ++--- python/api_v2/hostprovider/views.py | 16 ++ python/api_v2/service/views.py | 15 ++ .../tests/test_audit/test_group_config.py | 14 ++ python/api_v2/utils/audit.py | 37 ++++- 18 files changed, 565 insertions(+), 248 deletions(-) rename python/api_v2/{ => generic}/group_config/__init__.py (100%) create mode 100644 python/api_v2/generic/group_config/api_schema.py create mode 100644 python/api_v2/generic/group_config/audit.py rename python/api_v2/{ => generic}/group_config/permissions.py (66%) rename python/api_v2/{ => generic}/group_config/serializers.py (82%) create mode 100644 python/api_v2/generic/group_config/urls_helpers.py rename python/api_v2/{ => generic}/group_config/views.py 
(62%) diff --git a/python/api_v2/cluster/urls.py b/python/api_v2/cluster/urls.py index 84ef9f30e0..124493ddbb 100644 --- a/python/api_v2/cluster/urls.py +++ b/python/api_v2/cluster/urls.py @@ -21,12 +21,27 @@ ActionHostGroupViewSet, HostActionHostGroupViewSet, ) -from api_v2.cluster.views import ClusterImportViewSet, ClusterViewSet, HostClusterViewSet -from api_v2.component.views import ComponentViewSet, HostComponentViewSet +from api_v2.cluster.views import ( + ClusterGroupConfigViewSet, + ClusterHostGroupConfigViewSet, + ClusterImportViewSet, + ClusterViewSet, + HostClusterViewSet, +) +from api_v2.component.views import ( + ComponentGroupConfigViewSet, + ComponentHostGroupConfigViewSet, + ComponentViewSet, + HostComponentViewSet, +) from api_v2.config.views import ConfigLogViewSet -from api_v2.group_config.views import GroupConfigViewSet -from api_v2.host.views import HostGroupConfigViewSet -from api_v2.service.views import ServiceImportViewSet, ServiceViewSet +from api_v2.generic.group_config.urls_helpers import add_group_config_routers +from api_v2.service.views import ( + ServiceGroupConfigViewSet, + ServiceHostGroupConfigViewSet, + ServiceImportViewSet, + ServiceViewSet, +) from api_v2.upgrade.views import UpgradeViewSet CLUSTER_PREFIX = "" @@ -36,7 +51,6 @@ SERVICE_PREFIX = "services" CONFIG_PREFIX = "configs" IMPORT_PREFIX = "imports" -CONFIG_GROUPS_PREFIX = "config-groups" ACTION_HOST_GROUPS_PREFIX = "action-host-groups" @@ -44,25 +58,6 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st return tuple(itertools.chain.from_iterable(router.urls for router in routers)) -def add_group_config_routers( - parent_router: NestedSimpleRouter | SimpleRouter, parent_prefix: str, lookup: str -) -> tuple[NestedSimpleRouter, ...]: - group_config_router = NestedSimpleRouter(parent_router=parent_router, parent_prefix=parent_prefix, lookup=lookup) - group_config_router.register( - prefix=CONFIG_GROUPS_PREFIX, viewset=GroupConfigViewSet, 
basename=f"{lookup}-group-config" - ) - - hosts_router = NestedSimpleRouter(group_config_router, CONFIG_GROUPS_PREFIX, lookup="group_config") - hosts_router.register(prefix=r"hosts", viewset=HostGroupConfigViewSet, basename=f"{lookup}-group-config-hosts") - - config_router = NestedSimpleRouter( - parent_router=group_config_router, parent_prefix=CONFIG_GROUPS_PREFIX, lookup="group_config" - ) - config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename=f"{lookup}-group-config-config") - - return group_config_router, hosts_router, config_router - - def add_action_host_groups_routers( parent_router: NestedSimpleRouter | SimpleRouter, parent_prefix: str, lookup: str ) -> tuple[NestedSimpleRouter, ...]: @@ -104,7 +99,11 @@ def add_action_host_groups_routers( cluster_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="cluster-config") cluster_config_group_routers = add_group_config_routers( - parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster" + group_config_viewset=ClusterGroupConfigViewSet, + host_group_config_viewset=ClusterHostGroupConfigViewSet, + parent_router=cluster_router, + parent_prefix=CLUSTER_PREFIX, + lookup="cluster", ) cluster_action_host_groups_routers = add_action_host_groups_routers( @@ -125,7 +124,11 @@ def add_action_host_groups_routers( service_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="service-config") service_group_config_routers = add_group_config_routers( - parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service" + group_config_viewset=ServiceGroupConfigViewSet, + host_group_config_viewset=ServiceHostGroupConfigViewSet, + parent_router=service_router, + parent_prefix=SERVICE_PREFIX, + lookup="service", ) service_action_host_groups_routers = add_action_host_groups_routers( parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service" @@ -146,7 +149,11 @@ def add_action_host_groups_routers( 
component_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="component-config") component_group_config_routers = add_group_config_routers( - parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component" + group_config_viewset=ComponentGroupConfigViewSet, + host_group_config_viewset=ComponentHostGroupConfigViewSet, + parent_router=component_router, + parent_prefix=COMPONENT_PREFIX, + lookup="component", ) component_action_host_groups_routers = add_action_host_groups_routers( parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component" diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index 83e400eebe..ff99d69471 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -86,6 +86,9 @@ from api_v2.cluster.utils import retrieve_mapping_data, save_mapping from api_v2.component.serializers import ComponentMappingSerializer from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset +from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer from api_v2.generic.imports.views import ImportViewSet from api_v2.host.filters import HostMemberFilter @@ -725,3 +728,15 @@ def detect_get_check_kwargs(self) -> tuple[dict, dict]: def detect_cluster_service_bind_arguments(self, obj: Cluster) -> tuple[Cluster, None]: return obj, None + + +@document_group_config_viewset(object_type="cluster") +@audit_group_config_viewset(retrieve_owner=parent_cluster_from_lookup) +class ClusterGroupConfigViewSet(GroupConfigViewSet): + ... 
+ + +@document_host_group_config_viewset(object_type="cluster") +@audit_host_group_config_viewset(retrieve_owner=parent_cluster_from_lookup) +class ClusterHostGroupConfigViewSet(HostGroupConfigViewSet): + ... diff --git a/python/api_v2/component/views.py b/python/api_v2/component/views.py index 2cf97ccbeb..9a6a921500 100644 --- a/python/api_v2/component/views.py +++ b/python/api_v2/component/views.py @@ -49,6 +49,10 @@ HostComponentSerializer, ) from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset +from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet +from api_v2.utils.audit import parent_component_from_lookup from api_v2.views import ( ADCMGenericViewSet, ADCMReadOnlyModelViewSet, @@ -205,3 +209,15 @@ def get_queryset(self, *args, **kwargs): .get_queryset(*args, **kwargs) .filter(cluster=cluster, id__in=host.hostcomponent_set.all().values_list("component_id", flat=True)) ) + + +@document_group_config_viewset(object_type="component") +@audit_group_config_viewset(retrieve_owner=parent_component_from_lookup) +class ComponentGroupConfigViewSet(GroupConfigViewSet): + ... + + +@document_host_group_config_viewset(object_type="component") +@audit_host_group_config_viewset(retrieve_owner=parent_component_from_lookup) +class ComponentHostGroupConfigViewSet(HostGroupConfigViewSet): + ... 
diff --git a/python/api_v2/group_config/__init__.py b/python/api_v2/generic/group_config/__init__.py similarity index 100% rename from python/api_v2/group_config/__init__.py rename to python/api_v2/generic/group_config/__init__.py diff --git a/python/api_v2/generic/group_config/api_schema.py b/python/api_v2/generic/group_config/api_schema.py new file mode 100644 index 0000000000..d8509788ef --- /dev/null +++ b/python/api_v2/generic/group_config/api_schema.py @@ -0,0 +1,102 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from drf_spectacular.utils import extend_schema, extend_schema_view +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, +) + +from api_v2.api_schema import ErrorSerializer, responses +from api_v2.generic.group_config.serializers import GroupConfigSerializer, HostGroupConfigSerializer + + +def document_group_config_viewset(object_type: str): + capitalized_type = object_type.capitalize() + + return extend_schema_view( + list=extend_schema( + operation_id=f"get{capitalized_type}ConfigGroups", + summary=f"GET {object_type}'s config groups", + description=f"Get information about {object_type}'s config-groups", + responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + retrieve=extend_schema( + operation_id=f"get{capitalized_type}ConfigGroup", + summary=f"GET {object_type}'s config group", + description=f"Get information about {object_type}'s config-group", + responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + create=extend_schema( + operation_id=f"post{capitalized_type}ConfigGroups", + summary=f"POST {object_type}'s config-groups", + description=f"Create new {object_type}'s config-group.", + responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + partial_update=extend_schema( + operation_id=f"patch{capitalized_type}ConfigGroup", + summary=f"PATCH {object_type}'s config-group", + description=f"Change {object_type}'s config-group's name and description.", + responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + destroy=extend_schema( + operation_id=f"delete{capitalized_type}ConfigGroup", + summary=f"DELETE {object_type}'s config-group", + description=f"Delete specific {object_type}'s config-group.", + responses={HTTP_204_NO_CONTENT: None, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + 
host_candidates=extend_schema( + operation_id=f"get{capitalized_type}ConfigGroupHostCandidates", + summary=f"GET {object_type}'s config-group host candidates", + description=f"Get a list of hosts available for adding to {object_type}'s config group.", + responses={HTTP_200_OK: HostGroupConfigSerializer(many=True), HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + ) + + +def document_host_group_config_viewset(object_type: str): + capitalized_type = object_type.capitalize() + + return extend_schema_view( + list=extend_schema( + operation_id=f"get{capitalized_type}ConfigGroupHosts", + summary=f"GET {object_type}'s config-group hosts", + description=f"Get a list of hosts added to {object_type}'s config-group.", + responses={HTTP_200_OK: HostGroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + retrieve=extend_schema( + operation_id=f"get{capitalized_type}ConfigGroupHost", + summary=f"GET {object_type}'s config-group host", + description=f"Get information about a specific host of {object_type}'s config-group.", + responses={HTTP_200_OK: HostGroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, + ), + create=extend_schema( + operation_id=f"post{capitalized_type}ConfigGroupHosts", + summary=f"POST {object_type}'s config-group host", + description=f"Add host to {object_type}'s config-group.", + responses=responses( + success=(HTTP_201_CREATED, HostGroupConfigSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + destroy=extend_schema( + operation_id=f"delete{capitalized_type}ConfigGroupHosts", + summary=f"DELETE host from {object_type}'s config-group", + description=f"Remove host from {object_type}'s config-group.", + responses=responses(success=(HTTP_204_NO_CONTENT, None), errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + ), + ) diff --git a/python/api_v2/generic/group_config/audit.py b/python/api_v2/generic/group_config/audit.py new file mode 100644 index 0000000000..16dc222769 --- /dev/null 
+++ b/python/api_v2/generic/group_config/audit.py @@ -0,0 +1,134 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from contextlib import suppress +import json + +from audit.alt.api import audit_create, audit_delete, audit_update, audit_view +from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result, RetrieveAuditObjectFunc +from audit.alt.hooks import AuditHook, adjust_denied_on_404_result +from cm.models import GroupConfig, Host +from rest_framework.response import Response + +from api_v2.utils.audit import object_does_exist + + +def audit_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): + return audit_view( + create=audit_create(name="{group_name} configuration group created", object_=retrieve_owner).attach_hooks( + on_collect=set_group_name_from_response + ), + destroy=audit_delete(name="{group_name} configuration group deleted", object_=retrieve_owner).attach_hooks( + pre_call=set_group_name, on_collect=adjust_denied_on_404_result(objects_exist=group_config_does_not_exist) + ), + partial_update=audit_update( + name="{group_name} configuration group updated", object_=retrieve_owner + ).attach_hooks( + on_collect=(set_group_name, adjust_denied_on_404_result(objects_exist=group_config_does_not_exist)) + ), + ) + + +def audit_host_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): + return audit_view( + create=audit_update( + name="{host_name} host added to {group_name} configuration group", object_=retrieve_owner + 
).attach_hooks( + pre_call=set_group_and_host_names_from_response, + on_collect=adjust_denied_on_404_result(objects_exist=nested_group_config_does_not_exist), + ), + destroy=audit_update( + name="{host_name} host removed from {group_name} configuration group", object_=retrieve_owner + ).attach_hooks( + on_collect=[ + set_group_and_host_names, + adjust_denied_on_404_result(objects_exist=host_or_group_does_not_exist), + ] + ), + ) + + +# hooks + + +def group_config_does_not_exist(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=GroupConfig) + + +def nested_group_config_does_not_exist(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=GroupConfig, id_field="group_config_pk") + + +def host_or_group_does_not_exist(hook: AuditHook) -> bool: + return nested_group_config_does_not_exist(hook=hook) and object_does_exist(hook=hook, model=Host) + + +def set_group_name_from_response( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, # noqa: ARG001 + result: Result | None, + exception: Exception | None, # noqa: ARG001 +): + group_name = "" + if isinstance(result, Response) and result.status_code < 300 and isinstance(result.data, dict): + group_name = result.data.get("name", "") + + context.name = context.name.format(group_name=group_name).strip() + + +def set_group_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + group_name = GroupConfig.objects.values_list("name", flat=True).filter(id=call_arguments.get("pk")).first() + + context.name = context.name.format(group_name=group_name or "").strip() + + +def set_group_and_host_names( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + group_name = ( + GroupConfig.objects.values_list("name", 
flat=True).filter(id=call_arguments.get("group_config_pk")).first() + ) + host_name = Host.objects.values_list("fqdn", flat=True).filter(id=call_arguments.get("pk")).first() + + context.name = ( + context.name.format(group_name=group_name or "", host_name=host_name or "").strip().replace(" ", " ") + ) + + +def set_group_and_host_names_from_response( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + host_name = "" + group_name = ( + GroupConfig.objects.values_list("name", flat=True).filter(id=call_arguments.get("group_config_pk")).first() + ) + + if request := call_arguments.get("request"): + data = None + with suppress(AttributeError, json.JSONDecodeError): + data = json.loads(request.body) + + if isinstance(data, dict): + host_name = Host.objects.values_list("fqdn", flat=True).filter(id=data.get("hostId")).first() or "" + + context.name = context.name.format(group_name=group_name or "", host_name=host_name).strip().replace(" ", " ") diff --git a/python/api_v2/group_config/permissions.py b/python/api_v2/generic/group_config/permissions.py similarity index 66% rename from python/api_v2/group_config/permissions.py rename to python/api_v2/generic/group_config/permissions.py index 0fd312560f..e3df89ab70 100644 --- a/python/api_v2/group_config/permissions.py +++ b/python/api_v2/generic/group_config/permissions.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from audit.utils import audit from rest_framework.permissions import DjangoObjectPermissions @@ -25,7 +24,24 @@ class GroupConfigPermissions(DjangoObjectPermissions): "DELETE": ["%(app_label)s.change_%(model_name)s"], } - @audit + def has_permission(self, request, view) -> bool: + if view.action in ["create", "destroy", "update", "partial_update"]: + return True + + return super().has_permission(request=request, view=view) + + +class GroupConfigHostsPermissions(DjangoObjectPermissions): + perms_map = { + "GET": [], + "OPTIONS": [], + "HEAD": [], + "POST": ["%(app_label)s.add_%(model_name)s"], + "PUT": ["%(app_label)s.change_%(model_name)s"], + "PATCH": ["%(app_label)s.change_%(model_name)s"], + "DELETE": ["%(app_label)s.delete_%(model_name)s"], + } + def has_permission(self, request, view) -> bool: if view.action in ["create", "destroy", "update", "partial_update"]: return True diff --git a/python/api_v2/group_config/serializers.py b/python/api_v2/generic/group_config/serializers.py similarity index 82% rename from python/api_v2/group_config/serializers.py rename to python/api_v2/generic/group_config/serializers.py index 8ff8aa41e3..368b0cb9ac 100644 --- a/python/api_v2/group_config/serializers.py +++ b/python/api_v2/generic/group_config/serializers.py @@ -10,9 +10,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from cm.models import GroupConfig +from cm.models import GroupConfig, Host from django.contrib.contenttypes.models import ContentType from rest_framework.exceptions import ValidationError +from rest_framework.relations import PrimaryKeyRelatedField from rest_framework.serializers import ModelSerializer from api_v2.host.serializers import HostShortSerializer @@ -37,3 +38,12 @@ def validate_name(self, value): f"Group config with name {value} already exists for {parent_content_type} {object_.name}" ) return value + + +class HostGroupConfigSerializer(ModelSerializer): + id = PrimaryKeyRelatedField(queryset=Host.objects.all()) + + class Meta: + model = Host + fields = ["id", "name"] + extra_kwargs = {"name": {"read_only": True}} diff --git a/python/api_v2/generic/group_config/urls_helpers.py b/python/api_v2/generic/group_config/urls_helpers.py new file mode 100644 index 0000000000..d517459da6 --- /dev/null +++ b/python/api_v2/generic/group_config/urls_helpers.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from rest_framework.routers import SimpleRouter +from rest_framework_nested.routers import NestedSimpleRouter + +from api_v2.config.views import ConfigLogViewSet +from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet + +CONFIG_PREFIX = "configs" +CONFIG_GROUPS_PREFIX = "config-groups" + + +def add_group_config_routers( + group_config_viewset: type[GroupConfigViewSet], + host_group_config_viewset: type[HostGroupConfigViewSet], + parent_router: NestedSimpleRouter | SimpleRouter, + parent_prefix: str, + lookup: str, +) -> tuple[NestedSimpleRouter, ...]: + group_config_router = NestedSimpleRouter(parent_router=parent_router, parent_prefix=parent_prefix, lookup=lookup) + group_config_router.register( + prefix=CONFIG_GROUPS_PREFIX, viewset=group_config_viewset, basename=f"{lookup}-group-config" + ) + + hosts_router = NestedSimpleRouter(group_config_router, CONFIG_GROUPS_PREFIX, lookup="group_config") + hosts_router.register(prefix="hosts", viewset=host_group_config_viewset, basename=f"{lookup}-group-config-hosts") + + config_router = NestedSimpleRouter( + parent_router=group_config_router, parent_prefix=CONFIG_GROUPS_PREFIX, lookup="group_config" + ) + config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename=f"{lookup}-group-config-config") + + return group_config_router, hosts_router, config_router diff --git a/python/api_v2/group_config/views.py b/python/api_v2/generic/group_config/views.py similarity index 62% rename from python/api_v2/group_config/views.py rename to python/api_v2/generic/group_config/views.py index 48155c62ef..090039da94 100644 --- a/python/api_v2/group_config/views.py +++ b/python/api_v2/generic/group_config/views.py @@ -11,13 +11,12 @@ # limitations under the License. 
from adcm.mixins import GetParentObjectMixin, ParentObject -from adcm.permissions import VIEW_GROUP_CONFIG_PERM, check_config_perm -from audit.utils import audit +from adcm.permissions import VIEW_GROUP_CONFIG_PERM, VIEW_HOST_PERM, check_config_perm from cm.errors import AdcmEx -from cm.models import GroupConfig +from cm.models import GroupConfig, Host from django.contrib.contenttypes.models import ContentType from django.shortcuts import get_object_or_404 -from drf_spectacular.utils import extend_schema, extend_schema_view +from django_filters.rest_framework import DjangoFilterBackend from guardian.mixins import PermissionListMixin from rbac.models import re_apply_object_policy from rest_framework.decorators import action @@ -25,54 +24,20 @@ from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND +from rest_framework.status import ( + HTTP_200_OK, + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, +) -from api_v2.api_schema import ErrorSerializer from api_v2.config.utils import ConfigSchemaMixin -from api_v2.group_config.permissions import GroupConfigPermissions -from api_v2.group_config.serializers import GroupConfigSerializer -from api_v2.host.serializers import HostGroupConfigSerializer +from api_v2.generic.group_config.permissions import GroupConfigHostsPermissions, GroupConfigPermissions +from api_v2.generic.group_config.serializers import GroupConfigSerializer, HostGroupConfigSerializer +from api_v2.host.filters import HostMemberFilter +from api_v2.host.serializers import HostAddSerializer from api_v2.views import ADCMGenericViewSet -@extend_schema_view( - list=extend_schema( - operation_id="getObjectConfigGroups", - summary="GET object's config groups", - description="Get information about object's config-groups", - responses={HTTP_200_OK: GroupConfigSerializer, 
HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - retrieve=extend_schema( - operation_id="getObjectConfigGroup", - summary="GET object's config group", - description="Get information about object's config-group", - responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - create=extend_schema( - operation_id="postObjectConfigGroups", - summary="POST object's config-groups", - description="Create new object's config-group.", - responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - partial_update=extend_schema( - operation_id="patchObjectConfigGroup", - summary="PATCH object's config-group", - description="Change object's config-group's name and description.", - responses={HTTP_200_OK: GroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - destroy=extend_schema( - operation_id="deleteObjectConfigGroup", - summary="DELETE object's config-group", - description="Delete specific object's config-group.", - responses={HTTP_204_NO_CONTENT: None, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - host_candidates=extend_schema( - operation_id="getObjectConfigGroupHostCandidates", - summary="GET object's config-group host candidates", - description="Get a list of hosts available for adding to object's config group.", - responses={HTTP_200_OK: HostGroupConfigSerializer(many=True), HTTP_404_NOT_FOUND: ErrorSerializer}, - ), -) class GroupConfigViewSet( PermissionListMixin, GetParentObjectMixin, @@ -91,7 +56,7 @@ def get_queryset(self, *args, **kwargs): parent_object = self.get_parent_object() if parent_object is None: - raise NotFound + return GroupConfig.objects.none() return ( super() @@ -99,8 +64,7 @@ def get_queryset(self, *args, **kwargs): .filter(object_id=parent_object.pk, object_type=ContentType.objects.get_for_model(model=parent_object)) ) - @audit - def create(self, request: Request, *args, **kwargs): # noqa: ARG002 + def create(self, request: Request, *_, **__): parent_object = 
self.get_parent_object() self._check_parent_permissions(parent_object=parent_object) @@ -135,7 +99,6 @@ def host_candidates(self, request: Request, *args, **kwargs): # noqa: ARG001, A return Response(data=serializer.data, status=HTTP_200_OK) - @audit def destroy(self, request: Request, *args, **kwargs): # noqa: ARG002 parent_object = self.get_parent_object() instance = get_object_or_404( @@ -151,8 +114,7 @@ def destroy(self, request: Request, *args, **kwargs): # noqa: ARG002 instance.delete() return Response(status=HTTP_204_NO_CONTENT) - @audit - def partial_update(self, request: Request, *args, **kwargs): # noqa: ARG002 + def partial_update(self, request: Request, *_, **__): parent_object = self.get_parent_object() instance = get_object_or_404( self.filter_queryset(self.get_queryset()), **{self.lookup_field: self.kwargs[self.lookup_field]} @@ -170,11 +132,11 @@ def partial_update(self, request: Request, *args, **kwargs): # noqa: ARG002 return Response(serializer.data) - def retrieve(self, request, *args, **kwargs) -> Response: # noqa: ARG002 + def retrieve(self, request, *args, **kwargs) -> Response: self._check_parent_permissions() return super().retrieve(request, *args, **kwargs) - def list(self, request, *args, **kwargs) -> Response: # noqa: ARG002 + def list(self, request, *args, **kwargs) -> Response: self._check_parent_permissions() return super().list(request, *args, **kwargs) @@ -183,16 +145,83 @@ def _check_parent_permissions(self, parent_object: ParentObject = None): parent_view_perm = f"cm.view_{parent_obj.__class__.__name__.lower()}" if parent_obj is None: - raise NotFound + raise NotFound() if not ( self.request.user.has_perm(parent_view_perm, parent_obj) or self.request.user.has_perm(parent_view_perm) ): - raise NotFound + raise NotFound() parent_config_view_perm = "cm.view_objectconfig" if not ( self.request.user.has_perm(parent_config_view_perm, parent_obj.config) or self.request.user.has_perm(parent_config_view_perm) ): - raise PermissionDenied + 
raise PermissionDenied() + + +class HostGroupConfigViewSet( + PermissionListMixin, GetParentObjectMixin, ListModelMixin, RetrieveModelMixin, ADCMGenericViewSet +): + queryset = ( + Host.objects.select_related("provider", "cluster") + .prefetch_related("concerns", "hostcomponent_set") + .order_by("fqdn") + ) + permission_classes = [GroupConfigHostsPermissions] + permission_required = [VIEW_HOST_PERM] + filterset_class = HostMemberFilter + filter_backends = (DjangoFilterBackend,) + pagination_class = None + + def get_serializer_class(self) -> type[HostGroupConfigSerializer | HostAddSerializer]: + if self.action == "create": + return HostAddSerializer + + return HostGroupConfigSerializer + + def get_queryset(self, *args, **kwargs): # noqa: ARG002 + return self.queryset.filter(group_config__id=self.kwargs["group_config_pk"]) + + def get_group_for_change(self) -> GroupConfig: + config_group = super().get_parent_object() + if config_group is None or not isinstance(config_group, GroupConfig): + raise NotFound + + parent_view_perm = f"cm.view_{config_group.object.__class__.__name__.lower()}" + if not ( + self.request.user.has_perm(perm=parent_view_perm, obj=config_group.object) + or self.request.user.has_perm(perm=parent_view_perm) + ): + raise NotFound + + check_config_perm( + user=self.request.user, + action_type="change", + model=config_group.object.content_type.model, + obj=config_group.object, + ) + + return config_group + + def create(self, request, *_, **__): + group_config = self.get_group_for_change() + + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + host_id = serializer.validated_data["host_id"] + group_config.check_host_candidate(host_ids=[host_id]) + host = Host.objects.get(pk=host_id) + group_config.hosts.add(host) + + return Response(status=HTTP_201_CREATED, data=HostGroupConfigSerializer(instance=host).data) + + def destroy(self, request, *_, **kwargs): # noqa: ARG002 + group_config = 
self.get_group_for_change() + + host = group_config.hosts.filter(pk=kwargs["pk"]).first() + if not host: + raise NotFound + + group_config.hosts.remove(host) + return Response(status=HTTP_204_NO_CONTENT) diff --git a/python/api_v2/host/permissions.py b/python/api_v2/host/permissions.py index 3ec44fba74..e0738f48d2 100644 --- a/python/api_v2/host/permissions.py +++ b/python/api_v2/host/permissions.py @@ -14,25 +14,6 @@ from rest_framework.permissions import DjangoObjectPermissions -class GroupConfigHostsPermissions(DjangoObjectPermissions): - perms_map = { - "GET": [], - "OPTIONS": [], - "HEAD": [], - "POST": ["%(app_label)s.add_%(model_name)s"], - "PUT": ["%(app_label)s.change_%(model_name)s"], - "PATCH": ["%(app_label)s.change_%(model_name)s"], - "DELETE": ["%(app_label)s.delete_%(model_name)s"], - } - - @audit - def has_permission(self, request, view) -> bool: - if view.action in ["create", "destroy", "update", "partial_update"]: - return True - - return super().has_permission(request=request, view=view) - - class HostsPermissions(DjangoObjectPermissions): perms_map = { "GET": [], diff --git a/python/api_v2/host/serializers.py b/python/api_v2/host/serializers.py index 6a6ac1eb35..a4bb5aec33 100644 --- a/python/api_v2/host/serializers.py +++ b/python/api_v2/host/serializers.py @@ -20,7 +20,6 @@ ChoiceField, IntegerField, ModelSerializer, - PrimaryKeyRelatedField, SerializerMethodField, ) @@ -168,15 +167,6 @@ class Meta: fields = ["id", "name"] -class HostGroupConfigSerializer(ModelSerializer): - id = PrimaryKeyRelatedField(queryset=Host.objects.all()) - - class Meta: - model = Host - fields = ["id", "name"] - extra_kwargs = {"name": {"read_only": True}} - - class HostAuditSerializer(ModelSerializer): class Meta: model = Host diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index a2ba6f0a3b..be459ebfb9 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -11,26 +11,23 @@ # limitations under the License. 
-from adcm.mixins import GetParentObjectMixin from adcm.permissions import ( VIEW_CLUSTER_PERM, VIEW_HOST_PERM, VIEW_PROVIDER_PERM, ChangeMMPermissions, - check_config_perm, check_custom_perm, get_object_for_user, ) from audit.utils import audit from cm.api import delete_host from cm.errors import AdcmEx -from cm.models import Cluster, ConcernType, GroupConfig, Host, HostProvider +from cm.models import Cluster, ConcernType, Host, HostProvider from django.db.transaction import atomic from django_filters.rest_framework.backends import DjangoFilterBackend from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin from rest_framework.decorators import action -from rest_framework.exceptions import NotFound from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response @@ -46,16 +43,13 @@ from api_v2.api_schema import DefaultParams, ErrorSerializer from api_v2.config.utils import ConfigSchemaMixin -from api_v2.host.filters import HostFilter, HostMemberFilter +from api_v2.host.filters import HostFilter from api_v2.host.permissions import ( - GroupConfigHostsPermissions, HostsPermissions, ) from api_v2.host.serializers import ( - HostAddSerializer, HostChangeMaintenanceModeSerializer, HostCreateSerializer, - HostGroupConfigSerializer, HostSerializer, HostUpdateSerializer, ) @@ -260,104 +254,3 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 return maintenance_mode(request=request, host=self.get_object()) - - -@extend_schema_view( - list=extend_schema( - operation_id="getObjectConfigGroupHosts", - summary="GET object's config-group hosts", - description="Get a list of hosts added to object's 
config-group.", - responses={HTTP_200_OK: HostGroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - retrieve=extend_schema( - operation_id="getObjectConfigGroupHost", - summary="GET object's config-group host", - description="Get information about a specific host of object's config-group.", - responses={HTTP_200_OK: HostGroupConfigSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - create=extend_schema( - operation_id="postObjectConfigGroupHosts", - summary="POST object's config-group host", - description="Add host to object's config-group.", - responses={ - HTTP_201_CREATED: HostGroupConfigSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, - ), - destroy=extend_schema( - operation_id="deleteObjectConfigGroupHosts", - summary="DELETE host from object's config-group", - description="Remove host from object's config-group.", - responses={HTTP_204_NO_CONTENT: None, HTTP_403_FORBIDDEN: ErrorSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), -) -class HostGroupConfigViewSet( - PermissionListMixin, GetParentObjectMixin, ListModelMixin, RetrieveModelMixin, ADCMGenericViewSet -): - queryset = ( - Host.objects.select_related("provider", "cluster") - .prefetch_related("concerns", "hostcomponent_set") - .order_by("fqdn") - ) - permission_classes = [GroupConfigHostsPermissions] - permission_required = [VIEW_HOST_PERM] - filterset_class = HostMemberFilter - filter_backends = (DjangoFilterBackend,) - pagination_class = None - - def get_serializer_class(self) -> type[HostGroupConfigSerializer | HostAddSerializer]: - if self.action == "create": - return HostAddSerializer - - return HostGroupConfigSerializer - - def get_queryset(self, *args, **kwargs): # noqa: ARG002 - return self.queryset.filter(group_config__id=self.kwargs["group_config_pk"]) - - def get_group_for_change(self) -> GroupConfig: - config_group = super().get_parent_object() 
- if config_group is None or not isinstance(config_group, GroupConfig): - raise NotFound - - parent_view_perm = f"cm.view_{config_group.object.__class__.__name__.lower()}" - if not ( - self.request.user.has_perm(perm=parent_view_perm, obj=config_group.object) - or self.request.user.has_perm(perm=parent_view_perm) - ): - raise NotFound - - check_config_perm( - user=self.request.user, - action_type="change", - model=config_group.object.content_type.model, - obj=config_group.object, - ) - - return config_group - - @audit - def create(self, request, *_, **__): - group_config = self.get_group_for_change() - - serializer = self.get_serializer(data=request.data) - serializer.is_valid(raise_exception=True) - host_id = serializer.validated_data["host_id"] - group_config.check_host_candidate(host_ids=[host_id]) - host = Host.objects.get(pk=host_id) - group_config.hosts.add(host) - - return Response(status=HTTP_201_CREATED, data=HostGroupConfigSerializer(instance=host).data) - - @audit - def destroy(self, request, *_, **kwargs): # noqa: ARG002 - group_config = self.get_group_for_change() - - host = group_config.hosts.filter(pk=kwargs["pk"]).first() - if not host: - raise NotFound - - group_config.hosts.remove(host) - return Response(status=HTTP_204_NO_CONTENT) diff --git a/python/api_v2/hostprovider/urls.py b/python/api_v2/hostprovider/urls.py index 2b03b10d47..acbc9df76f 100644 --- a/python/api_v2/hostprovider/urls.py +++ b/python/api_v2/hostprovider/urls.py @@ -10,18 +10,29 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Iterable +import itertools + from rest_framework.routers import SimpleRouter from rest_framework_nested.routers import NestedSimpleRouter from api_v2.action.views import ActionViewSet from api_v2.config.views import ConfigLogViewSet -from api_v2.group_config.views import GroupConfigViewSet -from api_v2.host.views import HostGroupConfigViewSet -from api_v2.hostprovider.views import HostProviderViewSet +from api_v2.generic.group_config.urls_helpers import add_group_config_routers +from api_v2.hostprovider.views import ( + HostProviderGroupConfigViewSet, + HostProviderHostGroupConfigViewSet, + HostProviderViewSet, +) from api_v2.upgrade.views import UpgradeViewSet CONFIG_GROUPS_PREFIX = "config-groups" + +def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[str, ...]: + return tuple(itertools.chain.from_iterable(router.urls for router in routers)) + + router = SimpleRouter() router.register("", HostProviderViewSet) @@ -34,21 +45,13 @@ upgrade_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") upgrade_router.register(prefix="upgrades", viewset=UpgradeViewSet) -group_config_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") -group_config_router.register( - prefix=CONFIG_GROUPS_PREFIX, viewset=GroupConfigViewSet, basename="hostprovider-group-config" -) - -group_config_hosts_router = NestedSimpleRouter(group_config_router, CONFIG_GROUPS_PREFIX, lookup="group_config") -group_config_hosts_router.register( - prefix=r"hosts", viewset=HostGroupConfigViewSet, basename="hostprovider-group-config-hosts" -) -group_config_config_router = NestedSimpleRouter( - parent_router=group_config_router, parent_prefix=CONFIG_GROUPS_PREFIX, lookup="group_config" -) -group_config_config_router.register( - prefix="configs", viewset=ConfigLogViewSet, basename="hostprovider-group-config-config" +group_config_routers = add_group_config_routers( + 
group_config_viewset=HostProviderGroupConfigViewSet, + host_group_config_viewset=HostProviderHostGroupConfigViewSet, + parent_router=router, + parent_prefix="", + lookup="hostprovider", ) urlpatterns = [ @@ -56,7 +59,5 @@ *action_router.urls, *config_router.urls, *upgrade_router.urls, - *group_config_router.urls, - *group_config_hosts_router.urls, - *group_config_config_router.urls, + *extract_urls_from_routers(group_config_routers), ] diff --git a/python/api_v2/hostprovider/views.py b/python/api_v2/hostprovider/views.py index a085c59ee0..8e90499d4d 100644 --- a/python/api_v2/hostprovider/views.py +++ b/python/api_v2/hostprovider/views.py @@ -25,12 +25,16 @@ from api_v2.api_schema import ErrorSerializer from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset +from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.hostprovider.filters import HostProviderFilter from api_v2.hostprovider.permissions import HostProviderPermissions from api_v2.hostprovider.serializers import ( HostProviderCreateSerializer, HostProviderSerializer, ) +from api_v2.utils.audit import parent_hostprovider_from_lookup from api_v2.views import ADCMGenericViewSet @@ -147,3 +151,15 @@ def destroy(self, request, *args, **kwargs): # noqa: ARG002 host_provider = self.get_object() delete_host_provider(host_provider) return Response(status=HTTP_204_NO_CONTENT) + + +@document_group_config_viewset(object_type="hostprovider") +@audit_group_config_viewset(retrieve_owner=parent_hostprovider_from_lookup) +class HostProviderGroupConfigViewSet(GroupConfigViewSet): + ... 
+ + +@document_host_group_config_viewset(object_type="hostprovider") +@audit_host_group_config_viewset(retrieve_owner=parent_hostprovider_from_lookup) +class HostProviderHostGroupConfigViewSet(HostGroupConfigViewSet): + ... diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index ab971b2f5c..0636e0e0ea 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -51,6 +51,9 @@ from api_v2.api_schema import DefaultParams, responses from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset +from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer from api_v2.generic.imports.views import ImportViewSet from api_v2.service.filters import ServiceFilter @@ -264,3 +267,15 @@ def detect_get_check_kwargs(self) -> tuple[dict, dict]: def detect_cluster_service_bind_arguments(self, obj: Cluster | ClusterObject) -> tuple[Cluster, ClusterObject]: return obj.cluster, obj + + +@document_group_config_viewset(object_type="service") +@audit_group_config_viewset(retrieve_owner=parent_service_from_lookup) +class ServiceGroupConfigViewSet(GroupConfigViewSet): + ... + + +@document_host_group_config_viewset(object_type="service") +@audit_host_group_config_viewset(retrieve_owner=parent_service_from_lookup) +class ServiceHostGroupConfigViewSet(HostGroupConfigViewSet): + ... 
diff --git a/python/api_v2/tests/test_audit/test_group_config.py b/python/api_v2/tests/test_audit/test_group_config.py index cbe8bf40ba..6c7a44dd54 100644 --- a/python/api_v2/tests/test_audit/test_group_config.py +++ b/python/api_v2/tests/test_audit/test_group_config.py @@ -1801,6 +1801,20 @@ def test_service_remove_host_not_found_fail(self): user__username="admin", ) + def test_service_remove_host_group_found_host_not_found_fail(self): + self.service_1_group_config.hosts.add(self.host_for_service) + + response = self.client.v2[self.service_1, "config-groups", 1000, "hosts", self.host_for_service].delete() + self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + + self.check_last_audit_record( + operation_name=f"{self.host_for_service.fqdn} host removed from configuration group", + operation_type="update", + operation_result="fail", + **self.prepare_audit_object_arguments(expected_object=self.service_1), + user__username="admin", + ) + def test_cluster_remove_host_no_perms_denied(self): self.client.login(**self.test_user_credentials) self.cluster_1_group_config.hosts.add(self.host_for_service) diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 5837d85193..3f72b6cf56 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -19,7 +19,7 @@ from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType -from cm.models import Cluster, ClusterObject, Host +from cm.models import Cluster, ClusterObject, Host, HostProvider, ServiceComponent from django.db.models import Model from rest_framework.response import Response @@ -69,6 +69,25 @@ def get_name(self, id_: str | int) -> str | None: return "/".join(names) +@dataclass(slots=True) +class ComponentAuditObjectCreator(CMAuditObjectCreator): + cm_model = ServiceComponent + name_field = "prototype__display_name" + + def get_name(self, id_: str | int) -> str | None: + names = 
( + ServiceComponent.objects.values_list( + "cluster__name", "service__prototype__display_name", "prototype__display_name" + ) + .filter(id=id_) + .first() + ) + if not names: + return None + + return "/".join(names) + + create_audit_cluster_object = CMAuditObjectCreator(cm_model=Cluster) create_audit_host_object = CMAuditObjectCreator(cm_model=Host, name_field="fqdn") @@ -86,6 +105,22 @@ def get_name(self, id_: str | int) -> str | None: ) parent_service_from_lookup = _extract_service_from(extract_id=ExtractID(field="service_pk").from_lookup_kwargs) +_extract_component_from = partial( + GeneralAuditObjectRetriever, + audit_object_type=AuditObjectType.COMPONENT, + create_new=ComponentAuditObjectCreator(cm_model=ServiceComponent), +) +parent_component_from_lookup = _extract_component_from(extract_id=ExtractID(field="component_pk").from_lookup_kwargs) + +_extract_hostprovider_from = partial( + GeneralAuditObjectRetriever, + audit_object_type=AuditObjectType.PROVIDER, + create_new=CMAuditObjectCreator(cm_model=HostProvider), +) +parent_hostprovider_from_lookup = _extract_hostprovider_from( + extract_id=ExtractID(field="hostprovider_pk").from_lookup_kwargs +) + host_from_lookup = GeneralAuditObjectRetriever( audit_object_type=AuditObjectType.HOST, extract_id=ExtractID(field="pk").from_lookup_kwargs, From 097be28396e1295012a9d3784556e9ab66d0d592 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 28 Jun 2024 06:53:42 +0000 Subject: [PATCH 04/98] ADCM-5696 Specialize Action ViewSets --- python/api_v2/adcm/urls.py | 9 +- python/api_v2/adcm/views.py | 16 +++ python/api_v2/cluster/urls.py | 77 +++++----- python/api_v2/cluster/views.py | 41 ++++++ python/api_v2/component/views.py | 34 +++++ .../api_v2/{ => generic}/action/__init__.py | 0 python/api_v2/generic/action/api_schema.py | 81 +++++++++++ python/api_v2/generic/action/audit.py | 44 ++++++ python/api_v2/{ => generic}/action/filters.py | 0 .../{ => generic}/action/serializers.py | 0 python/api_v2/{ => 
generic}/action/utils.py | 0 python/api_v2/{ => generic}/action/views.py | 115 +-------------- .../action_host_group/__init__.py | 0 .../generic/action_host_group/api_schema.py | 96 +++++++++++++ .../api_v2/generic/action_host_group/audit.py | 131 +++++++++++++++++ .../action_host_group/serializers.py | 0 .../generic/action_host_group/urls_helpers.py | 56 ++++++++ .../{ => generic}/action_host_group/views.py | 134 ++++++------------ python/api_v2/generic/group_config/audit.py | 21 +-- python/api_v2/host/urls.py | 5 +- python/api_v2/host/views.py | 10 ++ python/api_v2/hostprovider/urls.py | 4 +- python/api_v2/hostprovider/views.py | 9 ++ python/api_v2/service/views.py | 34 +++++ python/api_v2/task/serializers.py | 2 +- python/api_v2/tests/test_action_host_group.py | 4 +- python/api_v2/upgrade/views.py | 4 +- python/api_v2/utils/audit.py | 28 +++- 28 files changed, 682 insertions(+), 273 deletions(-) rename python/api_v2/{ => generic}/action/__init__.py (100%) create mode 100644 python/api_v2/generic/action/api_schema.py create mode 100644 python/api_v2/generic/action/audit.py rename python/api_v2/{ => generic}/action/filters.py (100%) rename python/api_v2/{ => generic}/action/serializers.py (100%) rename python/api_v2/{ => generic}/action/utils.py (100%) rename python/api_v2/{ => generic}/action/views.py (67%) rename python/api_v2/{ => generic}/action_host_group/__init__.py (100%) create mode 100644 python/api_v2/generic/action_host_group/api_schema.py create mode 100644 python/api_v2/generic/action_host_group/audit.py rename python/api_v2/{ => generic}/action_host_group/serializers.py (100%) create mode 100644 python/api_v2/generic/action_host_group/urls_helpers.py rename python/api_v2/{ => generic}/action_host_group/views.py (75%) diff --git a/python/api_v2/adcm/urls.py b/python/api_v2/adcm/urls.py index e052b6e651..1a3d53cae8 100644 --- a/python/api_v2/adcm/urls.py +++ b/python/api_v2/adcm/urls.py @@ -12,15 +12,14 @@ from django.urls import path -from 
api_v2.action.views import AdcmActionViewSet -from api_v2.adcm.views import ADCMConfigView, ADCMViewSet +from api_v2.adcm.views import ADCMActionViewSet, ADCMConfigView, ADCMViewSet urlpatterns = [ path("", ADCMViewSet.as_view({"get": "retrieve"}), name="adcm-detail"), path("configs/", ADCMConfigView.as_view({"get": "list", "post": "create"}), name="adcm-config-list"), path("configs//", ADCMConfigView.as_view({"get": "retrieve"}), name="adcm-config-detail"), path("config-schema/", ADCMConfigView.as_view({"get": "config_schema"}), name="adcm-config-schema"), - path("actions/", AdcmActionViewSet.as_view({"get": "list"}), name="adcm-action-list"), - path("actions//", AdcmActionViewSet.as_view({"get": "retrieve"}), name="adcm-action-detail"), - path("actions//run/", AdcmActionViewSet.as_view({"post": "run"}), name="adcm-action-run"), + path("actions/", ADCMActionViewSet.as_view({"get": "list"}), name="adcm-action-list"), + path("actions//", ADCMActionViewSet.as_view({"get": "retrieve"}), name="adcm-action-detail"), + path("actions//run/", ADCMActionViewSet.as_view({"post": "run"}), name="adcm-action-run"), ] diff --git a/python/api_v2/adcm/views.py b/python/api_v2/adcm/views.py index 10016799c9..2020c133c2 100644 --- a/python/api_v2/adcm/views.py +++ b/python/api_v2/adcm/views.py @@ -26,6 +26,10 @@ from api_v2.config.serializers import ConfigLogListSerializer, ConfigLogSerializer from api_v2.config.utils import get_config_schema from api_v2.config.views import ConfigLogViewSet +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.views import ActionViewSet +from api_v2.utils.audit import adcm_audit_object from api_v2.views import ADCMGenericViewSet @@ -108,3 +112,15 @@ def _check_create_permissions(self, request: Request, parent_object: ADCM | None def _check_parent_permissions(self, parent_object: ParentObject = None): pass + + 
+@document_action_viewset(object_type="ADCM", operation_id_variant="ADCM") +@audit_action_viewset(retrieve_owner=adcm_audit_object) +class ADCMActionViewSet(ActionViewSet): + def get_parent_object(self): + return ADCM.objects.first() + + def list(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 + self.parent_object = self.get_parent_object() + + return self._list_actions_available_to_user(request) diff --git a/python/api_v2/cluster/urls.py b/python/api_v2/cluster/urls.py index 124493ddbb..507694bcea 100644 --- a/python/api_v2/cluster/urls.py +++ b/python/api_v2/cluster/urls.py @@ -15,28 +15,36 @@ from rest_framework_nested.routers import NestedSimpleRouter, SimpleRouter -from api_v2.action.views import ActionViewSet -from api_v2.action_host_group.views import ( - ActionHostGroupActionViewSet, - ActionHostGroupViewSet, - HostActionHostGroupViewSet, -) from api_v2.cluster.views import ( + ClusterActionHostGroupActionsViewSet, + ClusterActionHostGroupHostsViewSet, + ClusterActionHostGroupViewSet, + ClusterActionViewSet, ClusterGroupConfigViewSet, + ClusterHostActionViewSet, ClusterHostGroupConfigViewSet, ClusterImportViewSet, ClusterViewSet, HostClusterViewSet, ) from api_v2.component.views import ( + ComponentActionHostGroupActionsViewSet, + ComponentActionHostGroupHostsViewSet, + ComponentActionHostGroupViewSet, + ComponentActionViewSet, ComponentGroupConfigViewSet, ComponentHostGroupConfigViewSet, ComponentViewSet, HostComponentViewSet, ) from api_v2.config.views import ConfigLogViewSet +from api_v2.generic.action_host_group.urls_helpers import add_action_host_groups_routers from api_v2.generic.group_config.urls_helpers import add_group_config_routers from api_v2.service.views import ( + ServiceActionHostGroupActionsViewSet, + ServiceActionHostGroupHostsViewSet, + ServiceActionHostGroupViewSet, + ServiceActionViewSet, ServiceGroupConfigViewSet, ServiceHostGroupConfigViewSet, ServiceImportViewSet, @@ -51,40 +59,12 @@ SERVICE_PREFIX = "services" 
CONFIG_PREFIX = "configs" IMPORT_PREFIX = "imports" -ACTION_HOST_GROUPS_PREFIX = "action-host-groups" def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[str, ...]: return tuple(itertools.chain.from_iterable(router.urls for router in routers)) -def add_action_host_groups_routers( - parent_router: NestedSimpleRouter | SimpleRouter, parent_prefix: str, lookup: str -) -> tuple[NestedSimpleRouter, ...]: - action_host_groups_router = NestedSimpleRouter( - parent_router=parent_router, parent_prefix=parent_prefix, lookup=lookup - ) - action_host_groups_router.register( - prefix=ACTION_HOST_GROUPS_PREFIX, viewset=ActionHostGroupViewSet, basename=f"{lookup}-action-host-group" - ) - - action_host_groups_actions_router = NestedSimpleRouter( - parent_router=action_host_groups_router, parent_prefix=ACTION_HOST_GROUPS_PREFIX, lookup="action_host_group" - ) - action_host_groups_actions_router.register( - prefix=ACTION_PREFIX, viewset=ActionHostGroupActionViewSet, basename=f"{lookup}-action-host-group-action" - ) - - action_host_groups_hosts_router = NestedSimpleRouter( - parent_router=action_host_groups_router, parent_prefix=ACTION_HOST_GROUPS_PREFIX, lookup="action_host_group" - ) - action_host_groups_hosts_router.register( - prefix=HOST_PREFIX, viewset=HostActionHostGroupViewSet, basename=f"{lookup}-action-host-group-host" - ) - - return action_host_groups_router, action_host_groups_actions_router, action_host_groups_hosts_router - - # cluster cluster_router = SimpleRouter() cluster_router.register(prefix=CLUSTER_PREFIX, viewset=ClusterViewSet) @@ -93,7 +73,7 @@ def add_action_host_groups_routers( import_cluster_router.register(prefix=IMPORT_PREFIX, viewset=ClusterImportViewSet, basename="cluster-import") cluster_action_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") -cluster_action_router.register(prefix=ACTION_PREFIX, viewset=ActionViewSet, basename="cluster-action") 
+cluster_action_router.register(prefix=ACTION_PREFIX, viewset=ClusterActionViewSet, basename="cluster-action") cluster_config_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") cluster_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="cluster-config") @@ -107,7 +87,12 @@ def add_action_host_groups_routers( ) cluster_action_host_groups_routers = add_action_host_groups_routers( - parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster" + ahg_viewset=ClusterActionHostGroupViewSet, + ahg_hosts_viewset=ClusterActionHostGroupHostsViewSet, + ahg_actions_viewset=ClusterActionHostGroupActionsViewSet, + parent_router=cluster_router, + parent_prefix=CLUSTER_PREFIX, + lookup="cluster", ) # service @@ -118,7 +103,7 @@ def add_action_host_groups_routers( import_service_router.register(prefix=IMPORT_PREFIX, viewset=ServiceImportViewSet, basename="service-import") service_action_router = NestedSimpleRouter(parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service") -service_action_router.register(prefix=ACTION_PREFIX, viewset=ActionViewSet, basename="service-action") +service_action_router.register(prefix=ACTION_PREFIX, viewset=ServiceActionViewSet, basename="service-action") service_config_router = NestedSimpleRouter(parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service") service_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="service-config") @@ -131,7 +116,12 @@ def add_action_host_groups_routers( lookup="service", ) service_action_host_groups_routers = add_action_host_groups_routers( - parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service" + ahg_viewset=ServiceActionHostGroupViewSet, + ahg_hosts_viewset=ServiceActionHostGroupHostsViewSet, + ahg_actions_viewset=ServiceActionHostGroupActionsViewSet, + parent_router=service_router, + parent_prefix=SERVICE_PREFIX, + lookup="service", 
) # component @@ -141,7 +131,7 @@ def add_action_host_groups_routers( component_action_router = NestedSimpleRouter( parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component" ) -component_action_router.register(prefix=ACTION_PREFIX, viewset=ActionViewSet, basename="component-action") +component_action_router.register(prefix=ACTION_PREFIX, viewset=ComponentActionViewSet, basename="component-action") component_config_router = NestedSimpleRouter( parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component" @@ -156,7 +146,12 @@ def add_action_host_groups_routers( lookup="component", ) component_action_host_groups_routers = add_action_host_groups_routers( - parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component" + ahg_viewset=ComponentActionHostGroupViewSet, + ahg_hosts_viewset=ComponentActionHostGroupHostsViewSet, + ahg_actions_viewset=ComponentActionHostGroupActionsViewSet, + parent_router=component_router, + parent_prefix=COMPONENT_PREFIX, + lookup="component", ) # host @@ -164,7 +159,7 @@ def add_action_host_groups_routers( host_router.register(prefix=HOST_PREFIX, viewset=HostClusterViewSet, basename="host-cluster") host_action_router = NestedSimpleRouter(parent_router=host_router, parent_prefix=HOST_PREFIX, lookup="host") -host_action_router.register(prefix=ACTION_PREFIX, viewset=ActionViewSet, basename="host-cluster-action") +host_action_router.register(prefix=ACTION_PREFIX, viewset=ClusterHostActionViewSet, basename="host-cluster-action") host_component_router = NestedSimpleRouter(parent_router=host_router, parent_prefix=HOST_PREFIX, lookup="host") host_component_router.register(prefix=COMPONENT_PREFIX, viewset=HostComponentViewSet, basename="host-cluster-component") diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index ff99d69471..b39ccfcae5 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -86,6 +86,19 @@ from 
api_v2.cluster.utils import retrieve_mapping_data, save_mapping from api_v2.component.serializers import ComponentMappingSerializer from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.action_host_group.api_schema import ( + document_action_host_group_actions_viewset, + document_action_host_group_hosts_viewset, + document_action_host_group_viewset, +) +from api_v2.generic.action_host_group.views import ( + ActionHostGroupActionsViewSet, + ActionHostGroupHostsViewSet, + ActionHostGroupViewSet, +) from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet @@ -105,6 +118,7 @@ host_from_lookup, nested_host_does_exist, parent_cluster_from_lookup, + parent_host_from_lookup, set_add_hosts_name, set_removed_host_name, update_cluster_name, @@ -740,3 +754,30 @@ class ClusterGroupConfigViewSet(GroupConfigViewSet): @audit_host_group_config_viewset(retrieve_owner=parent_cluster_from_lookup) class ClusterHostGroupConfigViewSet(HostGroupConfigViewSet): ... + + +@document_action_viewset(object_type="cluster") +@audit_action_viewset(retrieve_owner=parent_cluster_from_lookup) +class ClusterActionViewSet(ActionViewSet): + ... + + +@document_action_viewset(object_type="hostInCluster") +@audit_action_viewset(retrieve_owner=parent_host_from_lookup) +class ClusterHostActionViewSet(ActionViewSet): + ... + + +@document_action_host_group_viewset(object_type="cluster") +class ClusterActionHostGroupViewSet(ActionHostGroupViewSet): + ... 
+ + +@document_action_host_group_hosts_viewset(object_type="cluster") +class ClusterActionHostGroupHostsViewSet(ActionHostGroupHostsViewSet): + ... + + +@document_action_host_group_actions_viewset(object_type="cluster") +class ClusterActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): + ... diff --git a/python/api_v2/component/views.py b/python/api_v2/component/views.py index 9a6a921500..7ff78e9fbd 100644 --- a/python/api_v2/component/views.py +++ b/python/api_v2/component/views.py @@ -49,6 +49,19 @@ HostComponentSerializer, ) from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.action_host_group.api_schema import ( + document_action_host_group_actions_viewset, + document_action_host_group_hosts_viewset, + document_action_host_group_viewset, +) +from api_v2.generic.action_host_group.views import ( + ActionHostGroupActionsViewSet, + ActionHostGroupHostsViewSet, + ActionHostGroupViewSet, +) from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet @@ -221,3 +234,24 @@ class ComponentGroupConfigViewSet(GroupConfigViewSet): @audit_host_group_config_viewset(retrieve_owner=parent_component_from_lookup) class ComponentHostGroupConfigViewSet(HostGroupConfigViewSet): ... + + +@document_action_viewset(object_type="component") +@audit_action_viewset(retrieve_owner=parent_component_from_lookup) +class ComponentActionViewSet(ActionViewSet): + ... + + +@document_action_host_group_viewset(object_type="component") +class ComponentActionHostGroupViewSet(ActionHostGroupViewSet): + ... 
+ + +@document_action_host_group_hosts_viewset(object_type="component") +class ComponentActionHostGroupHostsViewSet(ActionHostGroupHostsViewSet): + ... + + +@document_action_host_group_actions_viewset(object_type="component") +class ComponentActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): + ... diff --git a/python/api_v2/action/__init__.py b/python/api_v2/generic/action/__init__.py similarity index 100% rename from python/api_v2/action/__init__.py rename to python/api_v2/generic/action/__init__.py diff --git a/python/api_v2/generic/action/api_schema.py b/python/api_v2/generic/action/api_schema.py new file mode 100644 index 0000000000..9c95b0227b --- /dev/null +++ b/python/api_v2/generic/action/api_schema.py @@ -0,0 +1,81 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view +from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT + +from api_v2.api_schema import DefaultParams, responses +from api_v2.generic.action.serializers import ActionListSerializer, ActionRetrieveSerializer +from api_v2.task.serializers import TaskListSerializer + +_schema_common_filters = ( + OpenApiParameter( + name="name", + required=False, + location=OpenApiParameter.QUERY, + description="System name of an action", + type=str, + ), + OpenApiParameter( + name="displayName", + required=False, + location=OpenApiParameter.QUERY, + description="Visible name of an action", + type=str, + ), +) + + +def document_action_viewset(object_type: str, operation_id_variant: str | None = None): + capitalized_type = operation_id_variant or object_type.capitalize() + + return extend_schema_view( + run=extend_schema( + operation_id=f"post{capitalized_type}Action", + summary=f"POST {object_type}'s action", + description=f"Run {object_type}'s action.", + responses=responses( + success=TaskListSerializer, + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + list=extend_schema( + operation_id=f"get{capitalized_type}Actions", + summary=f"GET {object_type}'s actions", + description=f"Get a list of {object_type}'s actions.", + parameters=[ + DefaultParams.ordering_by("id"), + OpenApiParameter( + name="isHostOwnAction", + required=False, + location=OpenApiParameter.QUERY, + description="Filter for host's own actions / actions from another objects", + type=bool, + ), + OpenApiParameter( + name="prototypeId", + required=False, + location=OpenApiParameter.QUERY, + description="Identifier of action's owner", + type=int, + ), + *_schema_common_filters, + ], + responses=responses(success=ActionListSerializer, errors=HTTP_404_NOT_FOUND), + ), + retrieve=extend_schema( + 
operation_id=f"get{capitalized_type}Action", + summary=f"GET {object_type}'s action", + description=f"Get information about a specific {object_type}'s action.", + responses=responses(success=ActionRetrieveSerializer, errors=HTTP_404_NOT_FOUND), + ), + ) diff --git a/python/api_v2/generic/action/audit.py b/python/api_v2/generic/action/audit.py new file mode 100644 index 0000000000..35b76b8f43 --- /dev/null +++ b/python/api_v2/generic/action/audit.py @@ -0,0 +1,44 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from audit.alt.api import audit_update, audit_view +from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result, RetrieveAuditObjectFunc +from audit.alt.hooks import AuditHook, adjust_denied_on_404_result +from cm.models import Action + +from api_v2.utils.audit import object_does_exist + + +def audit_action_viewset(retrieve_owner: RetrieveAuditObjectFunc): + return audit_view( + run=audit_update(name="{action_name} action launched", object_=retrieve_owner).attach_hooks( + on_collect=[set_name, adjust_denied_on_404_result(objects_exist=action_exists)] + ) + ) + + +# hooks + + +def action_exists(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=Action) + + +def set_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + action_name = Action.objects.values_list("display_name", flat=True).filter(id=call_arguments.get("pk")).first() + + context.name = context.name.format(action_name=action_name or "").strip() diff --git a/python/api_v2/action/filters.py b/python/api_v2/generic/action/filters.py similarity index 100% rename from python/api_v2/action/filters.py rename to python/api_v2/generic/action/filters.py diff --git a/python/api_v2/action/serializers.py b/python/api_v2/generic/action/serializers.py similarity index 100% rename from python/api_v2/action/serializers.py rename to python/api_v2/generic/action/serializers.py diff --git a/python/api_v2/action/utils.py b/python/api_v2/generic/action/utils.py similarity index 100% rename from python/api_v2/action/utils.py rename to python/api_v2/generic/action/utils.py diff --git a/python/api_v2/action/views.py b/python/api_v2/generic/action/views.py similarity index 67% rename from python/api_v2/action/views.py rename to python/api_v2/generic/action/views.py index 9779c2b395..44be06e3d5 100644 --- a/python/api_v2/action/views.py +++ 
b/python/api_v2/generic/action/views.py @@ -13,16 +13,14 @@ from itertools import compress from adcm.mixins import GetParentObjectMixin -from audit.utils import audit from cm.errors import AdcmEx -from cm.models import ADCM, Action, ADCMEntity, ConcernType, Host, HostComponent, PrototypeConfig +from cm.models import Action, ADCMEntity, ConcernType, Host, HostComponent, PrototypeConfig from cm.services.config.jinja import get_jinja_config from cm.services.job.action import ActionRunPayload, run_action from cm.stack import check_hostcomponents_objects_exist from django.conf import settings from django.db.models import Q from django_filters.rest_framework.backends import DjangoFilterBackend -from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from rest_framework.decorators import action from rest_framework.exceptions import NotFound from rest_framework.mixins import ListModelMixin, RetrieveModelMixin @@ -30,92 +28,26 @@ from rest_framework.response import Response from rest_framework.status import ( HTTP_200_OK, - HTTP_400_BAD_REQUEST, - HTTP_403_FORBIDDEN, - HTTP_404_NOT_FOUND, - HTTP_409_CONFLICT, ) -from api_v2.action.filters import ActionFilter -from api_v2.action.serializers import ( +from api_v2.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type +from api_v2.generic.action.filters import ActionFilter +from api_v2.generic.action.serializers import ( ActionListSerializer, ActionRetrieveSerializer, ActionRunSerializer, ) -from api_v2.action.utils import ( +from api_v2.generic.action.utils import ( filter_actions_by_user_perm, get_action_configuration, has_run_perms, insert_service_ids, unique_hc_entries, ) -from api_v2.api_schema import DefaultParams, ErrorSerializer -from api_v2.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type from api_v2.task.serializers import TaskListSerializer from api_v2.views import ADCMGenericViewSet -_schema_common_filters = ( - OpenApiParameter( - 
name="name", - required=False, - location=OpenApiParameter.QUERY, - description="System name of an action", - type=str, - ), - OpenApiParameter( - name="displayName", - required=False, - location=OpenApiParameter.QUERY, - description="Visible name of an action", - type=str, - ), -) - -@extend_schema_view( - run=extend_schema( - operation_id="postObjectAction", - summary="POST object's action", - description="Run object's action.", - responses={ - HTTP_200_OK: TaskListSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, - ), - list=extend_schema( - operation_id="getObjectActions", - summary="GET object's actions", - description="Get a list of object's actions.", - parameters=[ - DefaultParams.ordering_by("id"), - OpenApiParameter( - name="isHostOwnAction", - required=False, - location=OpenApiParameter.QUERY, - description="Filter for host's own actions / actions from another objects", - type=bool, - ), - OpenApiParameter( - name="prototypeId", - required=False, - location=OpenApiParameter.QUERY, - description="Identifier of action's owner", - type=int, - ), - *_schema_common_filters, - ], - responses={HTTP_200_OK: ActionListSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - retrieve=extend_schema( - operation_id="getObjectAction", - summary="GET object's action", - description="Get information about a specific object's action.", - responses={HTTP_200_OK: ActionRetrieveSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), -) class ActionViewSet(ListModelMixin, RetrieveModelMixin, GetParentObjectMixin, ADCMGenericViewSet): filter_backends = (DjangoFilterBackend,) filterset_class = ActionFilter @@ -209,7 +141,6 @@ def retrieve(self, request, *args, **kwargs): # noqa: ARG002 return Response(data=serializer.data) - @audit @action(methods=["post"], detail=True, url_path="run") def run(self, request: Request, *args, **kwargs) -> Response: # noqa: 
ARG001, ARG002 self.parent_object = self.get_parent_object() @@ -273,39 +204,3 @@ def _list_actions_available_to_user(self, request: Request) -> Response: def _get_actions_owner(self) -> ADCMEntity: return self.parent_object - - -@extend_schema_view( - run=extend_schema( - operation_id="postADCMAction", - summary="POST ADCM action", - description="Run ADCM action.", - responses={ - HTTP_200_OK: TaskListSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_404_NOT_FOUND: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, - ), - list=extend_schema( - operation_id="getADCMActions", - summary="GET ADCM actions", - description="Get a list of ADCM actions.", - parameters=[DefaultParams.ordering_by("id"), *_schema_common_filters], - responses={HTTP_200_OK: ActionListSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - retrieve=extend_schema( - operation_id="getADCMAction", - summary="GET ADCM action", - description="Get information about a specific ADCM action.", - responses={HTTP_200_OK: ActionRetrieveSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), -) -class AdcmActionViewSet(ActionViewSet): - def get_parent_object(self): - return ADCM.objects.first() - - def list(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 - self.parent_object = self.get_parent_object() - return self._list_actions_available_to_user(request) diff --git a/python/api_v2/action_host_group/__init__.py b/python/api_v2/generic/action_host_group/__init__.py similarity index 100% rename from python/api_v2/action_host_group/__init__.py rename to python/api_v2/generic/action_host_group/__init__.py diff --git a/python/api_v2/generic/action_host_group/api_schema.py b/python/api_v2/generic/action_host_group/api_schema.py new file mode 100644 index 0000000000..4aefb4e673 --- /dev/null +++ b/python/api_v2/generic/action_host_group/api_schema.py @@ -0,0 +1,96 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from drf_spectacular.utils import extend_schema, extend_schema_view +from rest_framework.status import ( + HTTP_201_CREATED, + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, +) + +from api_v2.api_schema import responses +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action_host_group.serializers import ActionHostGroupSerializer, ShortHostSerializer + + +def document_action_host_group_viewset(object_type: str): + capitalized_type = object_type.capitalize() + + return extend_schema_view( + create=extend_schema( + operation_id=f"post{capitalized_type}ActionHostGroup", + summary=f"POST {object_type}'s Action Host Group", + description=f"Create a new {object_type}'s action host group.", + responses=responses( + success=(HTTP_201_CREATED, ActionHostGroupSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + list=extend_schema( + operation_id=f"get{capitalized_type}ActionHostGroups", + summary=f"GET {object_type}'s Action Host Groups", + description=f"Return list of {object_type}'s action host groups.", + responses=responses( + success=ActionHostGroupSerializer(many=True), errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND) + ), + ), + retrieve=extend_schema( + operation_id=f"get{capitalized_type}ActionHostGroup", + summary=f"GET {object_type}'s Action Host Group", + description=f"Return information about specific {object_type}'s 
action host group.", + responses=responses(success=ActionHostGroupSerializer, errors=HTTP_404_NOT_FOUND), + ), + destroy=extend_schema( + operation_id=f"delete{capitalized_type}ActionHostGroup", + summary=f"DELETE {object_type}'s Action Host Group", + description=f"Delete specific {object_type}'s action host group.", + responses=responses(success=(HTTP_204_NO_CONTENT, None), errors=(HTTP_404_NOT_FOUND, HTTP_409_CONFLICT)), + ), + host_candidate=extend_schema( + operation_id=f"get{capitalized_type}ActionHostGroupCandidates", + summary=f"GET {object_type}'s Action Host Group's host candidates", + description=f"Return list of {object_type}'s hosts that can be added to action host group.", + responses=responses(success=ShortHostSerializer(many=True), errors=HTTP_404_NOT_FOUND), + ), + ) + + +def document_action_host_group_hosts_viewset(object_type: str): + capitalized_type = object_type.capitalize() + + return extend_schema_view( + create=extend_schema( + operation_id=f"post{capitalized_type}ActionHostGroupHosts", + summary=f"POST {object_type}'s Action Host Group hosts", + description=f"Add hosts to {object_type}'s action host group.", + responses=responses( + success=(HTTP_201_CREATED, ShortHostSerializer), + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + destroy=extend_schema( + operation_id=f"delete{capitalized_type}ActionHostGroupHosts", + summary=f"DELETE {object_type}'s Action Host Group hosts", + description=f"Delete specific host from {object_type}'s action host group.", + responses=responses(success=(HTTP_204_NO_CONTENT, None), errors=(HTTP_404_NOT_FOUND, HTTP_409_CONFLICT)), + ), + ) + + +def document_action_host_group_actions_viewset(object_type: str): + return document_action_viewset( + object_type=f"{object_type}'s action host group", + operation_id_variant=f"{object_type.capitalize()}ActionHostGroup", + ) diff --git a/python/api_v2/generic/action_host_group/audit.py 
b/python/api_v2/generic/action_host_group/audit.py new file mode 100644 index 0000000000..03e587c93f --- /dev/null +++ b/python/api_v2/generic/action_host_group/audit.py @@ -0,0 +1,131 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from contextlib import suppress +from functools import partial +import json + +from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result +from audit.alt.hooks import AuditHook +from audit.alt.object_retrievers import GeneralAuditObjectRetriever +from audit.models import AuditObjectType +from cm.models import ActionHostGroup, Cluster, ClusterObject, Host, ServiceComponent + +from api_v2.utils.audit import CMAuditObjectCreator, ExtractID, object_does_exist + +# hooks + + +class ActionHostGroupAuditObjectCreator(CMAuditObjectCreator): + cm_model = ActionHostGroup + name_field = "prototype__display_name" + + def get_name(self, id_: str | int) -> str | None: + # retrieval of child names is not optimal, can be improved + # by avoiding prefetching object and just routing based on type + group = ActionHostGroup.objects.prefetch_related("object").filter(id=id_).first() + if not group: + return None + + parent = group.object + + # Also naming extraction can be unified maybe, but then no easy optimization probably + if isinstance(parent, Cluster): + names = (parent.name,) + elif isinstance(parent, ClusterObject): + names = ( + ClusterObject.objects.values_list("cluster__name", "prototype__display_name") + .filter(id=parent.id) 
+ .first() + or () + ) + elif isinstance(parent, ServiceComponent): + names = ( + ServiceComponent.objects.values_list( + "cluster__name", "service__prototype__display_name", "prototype__display_name" + ) + .filter(id=parent.id) + .first() + or () + ) + else: + names = () + + return "/".join((*names, group.name)) + + +_extract_action_host_group = partial( + GeneralAuditObjectRetriever, + audit_object_type=AuditObjectType.ACTION_HOST_GROUP, + create_new=ActionHostGroupAuditObjectCreator(cm_model=ActionHostGroup), +) +action_host_group_from_lookup = _extract_action_host_group(extract_id=ExtractID(field="pk").from_lookup_kwargs) +parent_action_host_group_from_lookup = _extract_action_host_group( + extract_id=ExtractID(field="action_host_group_pk").from_lookup_kwargs +) + + +def action_host_group_exists(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=ActionHostGroup) + + +def nested_action_host_group_exists(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=ActionHostGroup, id_field="action_host_group_pk") + + +def host_and_action_host_group_exist(hook: AuditHook) -> bool: + m2m = ActionHostGroup.hosts.through + return m2m.objects.filter( + host_id=hook.call_arguments.get("pk"), actionhostgroup_id=hook.call_arguments.get("action_host_group_pk") + ).exists() + + +def set_group_and_host_names( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + group_name = ( + ActionHostGroup.objects.values_list("name", flat=True) + .filter(id=call_arguments.get("action_host_group_pk")) + .first() + ) + host_name = Host.objects.values_list("fqdn", flat=True).filter(id=call_arguments.get("pk")).first() + + context.name = ( + context.name.format(group_name=group_name or "", host_name=host_name or "").strip().replace(" ", " ") + ) + + +def set_group_and_host_names_from_response( + context: OperationAuditContext, + call_arguments: 
AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + host_name = "" + group_name = ( + ActionHostGroup.objects.values_list("name", flat=True) + .filter(id=call_arguments.get("action_host_group_pk")) + .first() + ) + + if request := call_arguments.get("request"): + data = None + with suppress(AttributeError, json.JSONDecodeError): + data = json.loads(request.body) + + if isinstance(data, dict): + host_name = Host.objects.values_list("fqdn", flat=True).filter(id=data.get("hostId")).first() or "" + + context.name = context.name.format(group_name=group_name or "", host_name=host_name).strip().replace(" ", " ") diff --git a/python/api_v2/action_host_group/serializers.py b/python/api_v2/generic/action_host_group/serializers.py similarity index 100% rename from python/api_v2/action_host_group/serializers.py rename to python/api_v2/generic/action_host_group/serializers.py diff --git a/python/api_v2/generic/action_host_group/urls_helpers.py b/python/api_v2/generic/action_host_group/urls_helpers.py new file mode 100644 index 0000000000..692006fccd --- /dev/null +++ b/python/api_v2/generic/action_host_group/urls_helpers.py @@ -0,0 +1,56 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from rest_framework.routers import SimpleRouter +from rest_framework_nested.routers import NestedSimpleRouter + +from api_v2.generic.action_host_group.views import ( + ActionHostGroupActionsViewSet, + ActionHostGroupHostsViewSet, + ActionHostGroupViewSet, +) + +ACTION_PREFIX = "actions" +HOST_PREFIX = "hosts" +ACTION_HOST_GROUPS_PREFIX = "action-host-groups" + + +def add_action_host_groups_routers( + ahg_viewset: type[ActionHostGroupViewSet], + ahg_actions_viewset: type[ActionHostGroupActionsViewSet], + ahg_hosts_viewset: type[ActionHostGroupHostsViewSet], + parent_router: NestedSimpleRouter | SimpleRouter, + parent_prefix: str, + lookup: str, +) -> tuple[NestedSimpleRouter, ...]: + action_host_groups_router = NestedSimpleRouter( + parent_router=parent_router, parent_prefix=parent_prefix, lookup=lookup + ) + action_host_groups_router.register( + prefix=ACTION_HOST_GROUPS_PREFIX, viewset=ahg_viewset, basename=f"{lookup}-action-host-group" + ) + + action_host_groups_actions_router = NestedSimpleRouter( + parent_router=action_host_groups_router, parent_prefix=ACTION_HOST_GROUPS_PREFIX, lookup="action_host_group" + ) + action_host_groups_actions_router.register( + prefix=ACTION_PREFIX, viewset=ahg_actions_viewset, basename=f"{lookup}-action-host-group-action" + ) + + action_host_groups_hosts_router = NestedSimpleRouter( + parent_router=action_host_groups_router, parent_prefix=ACTION_HOST_GROUPS_PREFIX, lookup="action_host_group" + ) + action_host_groups_hosts_router.register( + prefix=HOST_PREFIX, viewset=ahg_hosts_viewset, basename=f"{lookup}-action-host-group-host" + ) + + return action_host_groups_router, action_host_groups_actions_router, action_host_groups_hosts_router diff --git a/python/api_v2/action_host_group/views.py b/python/api_v2/generic/action_host_group/views.py similarity index 75% rename from python/api_v2/action_host_group/views.py rename to python/api_v2/generic/action_host_group/views.py index 9d8102ffc1..66b7a74e51 100644 --- 
a/python/api_v2/action_host_group/views.py +++ b/python/api_v2/generic/action_host_group/views.py @@ -20,6 +20,8 @@ VIEW_COMPONENT_PERM, VIEW_SERVICE_PERM, ) +from audit.alt.api import audit_update, audit_view +from audit.alt.hooks import adjust_denied_on_404_result from audit.utils import audit from cm.converters import core_type_to_model from cm.errors import AdcmEx @@ -36,7 +38,6 @@ from django.contrib.contenttypes.models import ContentType from django.db.models import F, Model, QuerySet from django.db.transaction import atomic -from drf_spectacular.utils import extend_schema, extend_schema_view from guardian.shortcuts import get_objects_for_user from rbac.models import User from rest_framework.decorators import action @@ -45,25 +46,27 @@ from rest_framework.response import Response from rest_framework.serializers import Serializer from rest_framework.status import ( - HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT, - HTTP_404_NOT_FOUND, - HTTP_409_CONFLICT, ) -from api_v2.action.serializers import ActionListSerializer, ActionRetrieveSerializer -from api_v2.action.utils import has_run_perms -from api_v2.action.views import ActionViewSet -from api_v2.action_host_group.serializers import ( +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.utils import has_run_perms +from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.action_host_group.audit import ( + host_and_action_host_group_exist, + nested_action_host_group_exists, + parent_action_host_group_from_lookup, + set_group_and_host_names, + set_group_and_host_names_from_response, +) +from api_v2.generic.action_host_group.serializers import ( ActionHostGroupCreateResultSerializer, ActionHostGroupCreateSerializer, ActionHostGroupSerializer, AddHostSerializer, ShortHostSerializer, ) -from api_v2.api_schema import DOCS_CLIENT_INPUT_ERROR_RESPONSES, DOCS_DEFAULT_ERROR_RESPONSES, ErrorSerializer -from api_v2.task.serializers import TaskListSerializer from 
api_v2.views import ADCMGenericViewSet, with_group_object, with_parent_object _PARENT_PERMISSION_MAP: dict[ADCMCoreType, tuple[str, type[Model]]] = { @@ -121,42 +124,6 @@ def check_has_group_permissions(user: User, parent: CoreObjectDescriptor, dto: P check_has_group_permissions_for_object(user=user, parent_object=parent_object, dto=dto) -@extend_schema_view( - create=extend_schema( - operation_id="postObjectActionHostGroup", - summary="POST object's Action Host Group", - description="Create a new object's action host group.", - responses={ - HTTP_201_CREATED: ActionHostGroupSerializer, - **DOCS_DEFAULT_ERROR_RESPONSES, - **DOCS_CLIENT_INPUT_ERROR_RESPONSES, - }, - ), - list=extend_schema( - operation_id="getObjectActionHostGroups", - summary="GET object's Action Host Groups", - description="Return list of object's action host groups.", - responses={HTTP_200_OK: ActionHostGroupSerializer(many=True), **DOCS_DEFAULT_ERROR_RESPONSES}, - ), - retrieve=extend_schema( - operation_id="getObjectActionHostGroup", - summary="GET object's Action Host Group", - description="Return information about specific object's action host group.", - responses={HTTP_200_OK: ActionHostGroupSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - destroy=extend_schema( - operation_id="deleteObjectActionHostGroup", - summary="DELETE object's Action Host Group", - description="Delete specific object's action host group.", - responses={HTTP_204_NO_CONTENT: None, HTTP_404_NOT_FOUND: ErrorSerializer, HTTP_409_CONFLICT: ErrorSerializer}, - ), - host_candidate=extend_schema( - operation_id="getObjectActionHostGroupCandidates", - summary="GET object's Action Host Group's host candidates", - description="Return list of object's hosts that can be added to action host group.", - responses={HTTP_200_OK: ShortHostSerializer(many=True), HTTP_404_NOT_FOUND: ErrorSerializer}, - ), -) class ActionHostGroupViewSet(ADCMGenericViewSet): queryset = 
ActionHostGroup.objects.prefetch_related("hosts").order_by("id") action_host_group_service = ActionHostGroupService(repository=ActionHostGroupRepo()) @@ -259,28 +226,34 @@ def get_parent_name(self, parent: CoreObjectDescriptor) -> str: ) -@extend_schema_view( - create=extend_schema( - operation_id="postObjectActionHostGroupHosts", - summary="POST object's Action Host Group hosts", - description="Add hosts to object's action host group.", - responses={ - HTTP_201_CREATED: ShortHostSerializer, - **DOCS_DEFAULT_ERROR_RESPONSES, - **DOCS_CLIENT_INPUT_ERROR_RESPONSES, - }, - ), - destroy=extend_schema( - operation_id="deleteObjectActionHostGroupHosts", - summary="DELETE object's Action Host Group hosts", - description="Delete specific host from object's action host group.", - responses={HTTP_204_NO_CONTENT: None, HTTP_404_NOT_FOUND: ErrorSerializer, HTTP_409_CONFLICT: ErrorSerializer}, - ), -) -class HostActionHostGroupViewSet(ADCMGenericViewSet): +class ActionHostGroupHostsViewSet(ADCMGenericViewSet): serializer_class = AddHostSerializer action_host_group_service = ActionHostGroupService(repository=ActionHostGroupRepo()) + def __init_subclass__(cls, **__): + audit_view( + create=( + audit_update( + name="Host {host_name} added to action host group {group_name}", + object_=parent_action_host_group_from_lookup, + ).attach_hooks( + pre_call=set_group_and_host_names_from_response, + on_collect=adjust_denied_on_404_result(objects_exist=nested_action_host_group_exists), + ) + ), + destroy=( + audit_update( + name="Host {host_name} removed from action host group {group_name}", + object_=parent_action_host_group_from_lookup, + ).attach_hooks( + on_collect=[ + set_group_and_host_names, + adjust_denied_on_404_result(objects_exist=host_and_action_host_group_exist), + ] + ) + ), + )(cls) + @contextmanager def convert_exception(self) -> None: """ @@ -299,7 +272,6 @@ def convert_exception(self) -> None: except GroupIsLockedError as err: raise AdcmEx(code="TASK_ERROR", 
msg=err.message) from None - @audit @with_group_object def create( self, request: Request, *_, parent: CoreObjectDescriptor, host_group: HostGroupDescriptor, **__ @@ -319,7 +291,6 @@ def create( status=HTTP_201_CREATED, ) - @audit @with_group_object def destroy( self, request: Request, parent: CoreObjectDescriptor, host_group: HostGroupDescriptor, pk: str, **__ @@ -335,31 +306,10 @@ def destroy( return Response(status=HTTP_204_NO_CONTENT) -@extend_schema_view( - run=extend_schema( - operation_id="postActionHostGroupAction", - summary="POST action host group's action", - description="Run action host group's action.", - responses={ - HTTP_200_OK: TaskListSerializer, - **DOCS_DEFAULT_ERROR_RESPONSES, - **DOCS_CLIENT_INPUT_ERROR_RESPONSES, - }, - ), - list=extend_schema( - operation_id="getActionHostGroupActions", - summary="GET action host group's actions", - description="Get a list of action host group's actions.", - responses={HTTP_200_OK: ActionListSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - retrieve=extend_schema( - operation_id="getActionHostGroupAction", - summary="GET action host group's action", - description="Get information about a specific action host group's action.", - responses={HTTP_200_OK: ActionRetrieveSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), -) -class ActionHostGroupActionViewSet(ActionViewSet): +class ActionHostGroupActionsViewSet(ActionViewSet): + def __init_subclass__(cls, **__): + audit_action_viewset(retrieve_owner=parent_action_host_group_from_lookup)(cls) + def get_parent_object(self) -> ActionHostGroup | None: if "action_host_group_pk" not in self.kwargs: return None diff --git a/python/api_v2/generic/group_config/audit.py b/python/api_v2/generic/group_config/audit.py index 16dc222769..8e9aea6fd9 100644 --- a/python/api_v2/generic/group_config/audit.py +++ b/python/api_v2/generic/group_config/audit.py @@ -28,13 +28,11 @@ def audit_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): 
on_collect=set_group_name_from_response ), destroy=audit_delete(name="{group_name} configuration group deleted", object_=retrieve_owner).attach_hooks( - pre_call=set_group_name, on_collect=adjust_denied_on_404_result(objects_exist=group_config_does_not_exist) + pre_call=set_group_name, on_collect=adjust_denied_on_404_result(objects_exist=group_config_exists) ), partial_update=audit_update( name="{group_name} configuration group updated", object_=retrieve_owner - ).attach_hooks( - on_collect=(set_group_name, adjust_denied_on_404_result(objects_exist=group_config_does_not_exist)) - ), + ).attach_hooks(on_collect=(set_group_name, adjust_denied_on_404_result(objects_exist=group_config_exists))), ) @@ -44,14 +42,14 @@ def audit_host_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): name="{host_name} host added to {group_name} configuration group", object_=retrieve_owner ).attach_hooks( pre_call=set_group_and_host_names_from_response, - on_collect=adjust_denied_on_404_result(objects_exist=nested_group_config_does_not_exist), + on_collect=adjust_denied_on_404_result(objects_exist=nested_group_config_exists), ), destroy=audit_update( name="{host_name} host removed from {group_name} configuration group", object_=retrieve_owner ).attach_hooks( on_collect=[ set_group_and_host_names, - adjust_denied_on_404_result(objects_exist=host_or_group_does_not_exist), + adjust_denied_on_404_result(objects_exist=host_in_group_exists), ] ), ) @@ -60,16 +58,19 @@ def audit_host_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): # hooks -def group_config_does_not_exist(hook: AuditHook) -> bool: +def group_config_exists(hook: AuditHook) -> bool: return object_does_exist(hook=hook, model=GroupConfig) -def nested_group_config_does_not_exist(hook: AuditHook) -> bool: +def nested_group_config_exists(hook: AuditHook) -> bool: return object_does_exist(hook=hook, model=GroupConfig, id_field="group_config_pk") -def host_or_group_does_not_exist(hook: AuditHook) -> bool: - 
return nested_group_config_does_not_exist(hook=hook) and object_does_exist(hook=hook, model=Host) +def host_in_group_exists(hook: AuditHook) -> bool: + m2m = GroupConfig.hosts.through + return m2m.objects.filter( + host_id=hook.call_arguments.get("pk"), groupconfig_id=hook.call_arguments.get("group_config_pk") + ).exists() def set_group_name_from_response( diff --git a/python/api_v2/host/urls.py b/python/api_v2/host/urls.py index d092b7730e..b8b89dbf11 100644 --- a/python/api_v2/host/urls.py +++ b/python/api_v2/host/urls.py @@ -12,9 +12,8 @@ from rest_framework_nested.routers import NestedSimpleRouter, SimpleRouter -from api_v2.action.views import ActionViewSet from api_v2.config.views import ConfigLogViewSet -from api_v2.host.views import HostViewSet +from api_v2.host.views import HostActionViewSet, HostViewSet host_router = SimpleRouter() host_router.register(prefix="", viewset=HostViewSet) @@ -23,7 +22,7 @@ host_config_router.register(prefix="configs", viewset=ConfigLogViewSet, basename="host-config") host_action_router = NestedSimpleRouter(parent_router=host_router, parent_prefix="", lookup="host") -host_action_router.register(prefix="actions", viewset=ActionViewSet, basename="host-action") +host_action_router.register(prefix="actions", viewset=HostActionViewSet, basename="host-action") urlpatterns = [ *host_router.urls, diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index be459ebfb9..a17addae61 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -43,6 +43,9 @@ from api_v2.api_schema import DefaultParams, ErrorSerializer from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.views import ActionViewSet from api_v2.host.filters import HostFilter from api_v2.host.permissions import ( HostsPermissions, @@ -54,6 +57,7 @@ HostUpdateSerializer, ) from 
api_v2.host.utils import create_host, maintenance_mode, process_config_issues_policies_hc +from api_v2.utils.audit import parent_host_from_lookup from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -254,3 +258,9 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 return maintenance_mode(request=request, host=self.get_object()) + + +@document_action_viewset(object_type="host") +@audit_action_viewset(retrieve_owner=parent_host_from_lookup) +class HostActionViewSet(ActionViewSet): + ... diff --git a/python/api_v2/hostprovider/urls.py b/python/api_v2/hostprovider/urls.py index acbc9df76f..85a87fb171 100644 --- a/python/api_v2/hostprovider/urls.py +++ b/python/api_v2/hostprovider/urls.py @@ -16,10 +16,10 @@ from rest_framework.routers import SimpleRouter from rest_framework_nested.routers import NestedSimpleRouter -from api_v2.action.views import ActionViewSet from api_v2.config.views import ConfigLogViewSet from api_v2.generic.group_config.urls_helpers import add_group_config_routers from api_v2.hostprovider.views import ( + HostProviderActionViewSet, HostProviderGroupConfigViewSet, HostProviderHostGroupConfigViewSet, HostProviderViewSet, @@ -37,7 +37,7 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st router.register("", HostProviderViewSet) action_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") -action_router.register(prefix="actions", viewset=ActionViewSet, basename="provider-action") +action_router.register(prefix="actions", viewset=HostProviderActionViewSet, basename="provider-action") config_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") config_router.register(prefix="configs", viewset=ConfigLogViewSet, 
basename="provider-config") diff --git a/python/api_v2/hostprovider/views.py b/python/api_v2/hostprovider/views.py index 8e90499d4d..e520676518 100644 --- a/python/api_v2/hostprovider/views.py +++ b/python/api_v2/hostprovider/views.py @@ -25,6 +25,9 @@ from api_v2.api_schema import ErrorSerializer from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.views import ActionViewSet from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet @@ -163,3 +166,9 @@ class HostProviderGroupConfigViewSet(GroupConfigViewSet): @audit_host_group_config_viewset(retrieve_owner=parent_hostprovider_from_lookup) class HostProviderHostGroupConfigViewSet(HostGroupConfigViewSet): ... + + +@document_action_viewset(object_type="hostprovider") +@audit_action_viewset(retrieve_owner=parent_hostprovider_from_lookup) +class HostProviderActionViewSet(ActionViewSet): + ... 
diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index 0636e0e0ea..1821493946 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -51,6 +51,19 @@ from api_v2.api_schema import DefaultParams, responses from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.action.api_schema import document_action_viewset +from api_v2.generic.action.audit import audit_action_viewset +from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.action_host_group.api_schema import ( + document_action_host_group_actions_viewset, + document_action_host_group_hosts_viewset, + document_action_host_group_viewset, +) +from api_v2.generic.action_host_group.views import ( + ActionHostGroupActionsViewSet, + ActionHostGroupHostsViewSet, + ActionHostGroupViewSet, +) from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet @@ -279,3 +292,24 @@ class ServiceGroupConfigViewSet(GroupConfigViewSet): @audit_host_group_config_viewset(retrieve_owner=parent_service_from_lookup) class ServiceHostGroupConfigViewSet(HostGroupConfigViewSet): ... + + +@document_action_viewset(object_type="service") +@audit_action_viewset(retrieve_owner=parent_service_from_lookup) +class ServiceActionViewSet(ActionViewSet): + ... + + +@document_action_host_group_viewset(object_type="service") +class ServiceActionHostGroupViewSet(ActionHostGroupViewSet): + ... + + +@document_action_host_group_hosts_viewset(object_type="service") +class ServiceActionHostGroupHostsViewSet(ActionHostGroupHostsViewSet): + ... + + +@document_action_host_group_actions_viewset(object_type="service") +class ServiceActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): + ... 
diff --git a/python/api_v2/task/serializers.py b/python/api_v2/task/serializers.py index 52926cfb06..5372ac6e9f 100644 --- a/python/api_v2/task/serializers.py +++ b/python/api_v2/task/serializers.py @@ -14,7 +14,7 @@ from rest_framework.fields import CharField, DateTimeField, SerializerMethodField from rest_framework.serializers import ModelSerializer -from api_v2.action.serializers import ActionNameSerializer +from api_v2.generic.action.serializers import ActionNameSerializer OBJECT_ORDER = {"adcm": 0, "cluster": 1, "service": 2, "component": 3, "provider": 4, "host": 5, "action_host_group": 6} diff --git a/python/api_v2/tests/test_action_host_group.py b/python/api_v2/tests/test_action_host_group.py index b021080fc8..885a366286 100644 --- a/python/api_v2/tests/test_action_host_group.py +++ b/python/api_v2/tests/test_action_host_group.py @@ -383,7 +383,7 @@ def test_add_host_to_group(self) -> None: with self.subTest(f"[{type_.name}] Add Host Duplicate Audit FAIL"): self.check_last_audit_record( - operation_name=f"Host added to action host group {group.name}", + operation_name=f"Host {host_1.fqdn} added to action host group {group.name}", operation_type="update", operation_result="fail", **self.prepare_audit_object_arguments(expected_object=group), @@ -605,7 +605,7 @@ def test_run(self) -> None: with self.subTest(f"[{message_name}] Running Task Add Hosts Audit FAIL"): self.check_last_audit_record( - operation_name=f"Host added to action host group {group.name}", + operation_name=f"Host {host_2.fqdn} added to action host group {group.name}", operation_type="update", operation_result="fail", **self.prepare_audit_object_arguments(expected_object=group), diff --git a/python/api_v2/upgrade/views.py b/python/api_v2/upgrade/views.py index 9d3343b557..7fc7906b9a 100644 --- a/python/api_v2/upgrade/views.py +++ b/python/api_v2/upgrade/views.py @@ -33,9 +33,9 @@ from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT 
-from api_v2.action.serializers import ActionRunSerializer -from api_v2.action.utils import get_action_configuration, insert_service_ids, unique_hc_entries from api_v2.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type +from api_v2.generic.action.serializers import ActionRunSerializer +from api_v2.generic.action.utils import get_action_configuration, insert_service_ids, unique_hc_entries from api_v2.task.serializers import TaskListSerializer from api_v2.upgrade.serializers import UpgradeListSerializer, UpgradeRetrieveSerializer from api_v2.views import ADCMGenericViewSet diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 3f72b6cf56..0e70199ebf 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -19,7 +19,7 @@ from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType -from cm.models import Cluster, ClusterObject, Host, HostProvider, ServiceComponent +from cm.models import ADCM, Cluster, ClusterObject, Host, HostProvider, ServiceComponent from django.db.models import Model from rest_framework.response import Response @@ -121,11 +121,29 @@ def get_name(self, id_: str | int) -> str | None: extract_id=ExtractID(field="hostprovider_pk").from_lookup_kwargs ) -host_from_lookup = GeneralAuditObjectRetriever( - audit_object_type=AuditObjectType.HOST, - extract_id=ExtractID(field="pk").from_lookup_kwargs, - create_new=create_audit_host_object, +_extract_host_from = partial( + GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.HOST, create_new=create_audit_host_object ) +host_from_lookup = _extract_host_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +parent_host_from_lookup = _extract_host_from(extract_id=ExtractID(field="host_pk").from_lookup_kwargs) + + +def adcm_audit_object( + context: "OperationAuditContext", # noqa: ARG001 + call_arguments: AuditedCallArguments, # 
noqa: ARG001 + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +) -> AuditObject: + adcm = AuditObject.objects.filter(object_type=AuditObjectType.ADCM, is_deleted=False).first() + if adcm: + return adcm + + return AuditObject.objects.create( + object_id=ADCM.objects.values_list("id", flat=True).first(), + object_name="ADCM", + object_type=AuditObjectType.ADCM, + is_deleted=False, + ) # hooks From 9031565541c80f269810ae99da6efde6b478c591 Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Fri, 28 Jun 2024 13:48:48 +0500 Subject: [PATCH 05/98] ADCM-5703 Specialize config views --- python/adcm/tests/base.py | 2 +- python/api_v2/adcm/views.py | 41 +++------------- python/api_v2/cluster/urls.py | 16 +++++-- python/api_v2/cluster/views.py | 23 ++++++++- python/api_v2/component/views.py | 23 ++++++++- python/api_v2/generic/action/utils.py | 2 +- python/api_v2/generic/action/views.py | 2 +- .../api_v2/{ => generic}/config/__init__.py | 0 python/api_v2/generic/config/api_schema.py | 44 +++++++++++++++++ python/api_v2/generic/config/audit.py | 18 +++++++ .../{ => generic}/config/serializers.py | 0 python/api_v2/{ => generic}/config/utils.py | 0 python/api_v2/{ => generic}/config/views.py | 48 +++---------------- python/api_v2/generic/group_config/audit.py | 21 ++++++++ .../generic/group_config/urls_helpers.py | 7 ++- python/api_v2/generic/group_config/views.py | 2 +- python/api_v2/host/urls.py | 5 +- python/api_v2/host/views.py | 11 ++++- python/api_v2/hostprovider/urls.py | 6 ++- python/api_v2/hostprovider/views.py | 23 ++++++++- python/api_v2/service/views.py | 23 ++++++++- python/api_v2/tests/test_cluster.py | 2 +- python/api_v2/tests/test_config.py | 2 +- python/api_v2/upgrade/views.py | 2 +- 24 files changed, 221 insertions(+), 102 deletions(-) rename python/api_v2/{ => generic}/config/__init__.py (100%) create mode 100644 python/api_v2/generic/config/api_schema.py create mode 100644 python/api_v2/generic/config/audit.py rename 
python/api_v2/{ => generic}/config/serializers.py (100%) rename python/api_v2/{ => generic}/config/utils.py (100%) rename python/api_v2/{ => generic}/config/views.py (79%) diff --git a/python/adcm/tests/base.py b/python/adcm/tests/base.py index 04cbaeaaa4..8a890d68f6 100644 --- a/python/adcm/tests/base.py +++ b/python/adcm/tests/base.py @@ -20,7 +20,7 @@ import string import tarfile -from api_v2.config.utils import convert_adcm_meta_to_attr, convert_attr_to_adcm_meta +from api_v2.generic.config.utils import convert_adcm_meta_to_attr, convert_attr_to_adcm_meta from api_v2.prototype.utils import accept_license from api_v2.service.utils import bulk_add_services_to_cluster from cm.api import add_cluster, add_hc, add_host, add_host_provider, add_host_to_cluster, update_obj_config diff --git a/python/api_v2/adcm/views.py b/python/api_v2/adcm/views.py index 2020c133c2..d9380c900a 100644 --- a/python/api_v2/adcm/views.py +++ b/python/api_v2/adcm/views.py @@ -13,7 +13,7 @@ from adcm.mixins import ParentObject from adcm.permissions import check_config_perm from cm.models import ADCM, ConfigLog, PrototypeConfig -from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view +from drf_spectacular.utils import extend_schema, extend_schema_view from rest_framework.decorators import action from rest_framework.exceptions import NotFound from rest_framework.mixins import RetrieveModelMixin @@ -22,13 +22,13 @@ from rest_framework.status import HTTP_200_OK from api_v2.adcm.serializers import AdcmSerializer -from api_v2.api_schema import ErrorSerializer -from api_v2.config.serializers import ConfigLogListSerializer, ConfigLogSerializer -from api_v2.config.utils import get_config_schema -from api_v2.config.views import ConfigLogViewSet from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.config.api_schema import 
document_config_viewset +from api_v2.generic.config.audit import audit_config_viewset +from api_v2.generic.config.utils import get_config_schema +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.utils.audit import adcm_audit_object from api_v2.views import ADCMGenericViewSet @@ -49,35 +49,8 @@ def get_object(self, *args, **kwargs): # noqa: ARG001, ARG002 return super().get_queryset().first() -@extend_schema_view( - retrieve=extend_schema( - operation_id="getADCMConfig", - summary="GET ADCM config", - description="Get ADCM configuration information.", - responses={200: ConfigLogSerializer, 404: ErrorSerializer}, - ), - list=extend_schema( - operation_id="getADCMConfigs", - summary="GET ADCM config vesions", - description="Get information about ADCM config versions.", - parameters=[ - OpenApiParameter( - name="isCurrent", - required=False, - location=OpenApiParameter.QUERY, - description="Sign of the current configuration.", - type=bool, - ) - ], - responses={200: ConfigLogListSerializer, 404: ErrorSerializer}, - ), - create=extend_schema( - operation_id="postADCMConfigs", - summary="POST ADCM configs", - description="Create a new version of the ADCM configuration.", - responses={201: ConfigLogSerializer, 400: ErrorSerializer, 403: ErrorSerializer, 404: ErrorSerializer}, - ), -) +@document_config_viewset(object_type="ADCM", operation_id_variant="ADCM") +@audit_config_viewset(type_in_name="ADCM", retrieve_owner=adcm_audit_object) class ADCMConfigView(ConfigLogViewSet): def get_queryset(self, *args, **kwargs): # noqa: ARG002 return ( diff --git a/python/api_v2/cluster/urls.py b/python/api_v2/cluster/urls.py index 507694bcea..5bf4c5f212 100644 --- a/python/api_v2/cluster/urls.py +++ b/python/api_v2/cluster/urls.py @@ -20,6 +20,8 @@ ClusterActionHostGroupHostsViewSet, ClusterActionHostGroupViewSet, ClusterActionViewSet, + ClusterConfigHostGroupViewSet, + ClusterConfigViewSet, ClusterGroupConfigViewSet, ClusterHostActionViewSet, 
ClusterHostGroupConfigViewSet, @@ -32,12 +34,13 @@ ComponentActionHostGroupHostsViewSet, ComponentActionHostGroupViewSet, ComponentActionViewSet, + ComponentConfigHostGroupViewSet, + ComponentConfigViewSet, ComponentGroupConfigViewSet, ComponentHostGroupConfigViewSet, ComponentViewSet, HostComponentViewSet, ) -from api_v2.config.views import ConfigLogViewSet from api_v2.generic.action_host_group.urls_helpers import add_action_host_groups_routers from api_v2.generic.group_config.urls_helpers import add_group_config_routers from api_v2.service.views import ( @@ -45,6 +48,8 @@ ServiceActionHostGroupHostsViewSet, ServiceActionHostGroupViewSet, ServiceActionViewSet, + ServiceConfigHostGroupViewSet, + ServiceConfigViewSet, ServiceGroupConfigViewSet, ServiceHostGroupConfigViewSet, ServiceImportViewSet, @@ -76,11 +81,12 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st cluster_action_router.register(prefix=ACTION_PREFIX, viewset=ClusterActionViewSet, basename="cluster-action") cluster_config_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") -cluster_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="cluster-config") +cluster_config_router.register(prefix=CONFIG_PREFIX, viewset=ClusterConfigViewSet, basename="cluster-config") cluster_config_group_routers = add_group_config_routers( group_config_viewset=ClusterGroupConfigViewSet, host_group_config_viewset=ClusterHostGroupConfigViewSet, + config_group_config_viewset=ClusterConfigHostGroupViewSet, parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster", @@ -106,11 +112,12 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st service_action_router.register(prefix=ACTION_PREFIX, viewset=ServiceActionViewSet, basename="service-action") service_config_router = NestedSimpleRouter(parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service") 
-service_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="service-config") +service_config_router.register(prefix=CONFIG_PREFIX, viewset=ServiceConfigViewSet, basename="service-config") service_group_config_routers = add_group_config_routers( group_config_viewset=ServiceGroupConfigViewSet, host_group_config_viewset=ServiceHostGroupConfigViewSet, + config_group_config_viewset=ServiceConfigHostGroupViewSet, parent_router=service_router, parent_prefix=SERVICE_PREFIX, lookup="service", @@ -136,11 +143,12 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st component_config_router = NestedSimpleRouter( parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component" ) -component_config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename="component-config") +component_config_router.register(prefix=CONFIG_PREFIX, viewset=ComponentConfigViewSet, basename="component-config") component_group_config_routers = add_group_config_routers( group_config_viewset=ComponentGroupConfigViewSet, host_group_config_viewset=ComponentHostGroupConfigViewSet, + config_group_config_viewset=ComponentConfigHostGroupViewSet, parent_router=component_router, parent_prefix=COMPONENT_PREFIX, lookup="component", diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index b39ccfcae5..50d5a96313 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -85,7 +85,6 @@ ) from api_v2.cluster.utils import retrieve_mapping_data, save_mapping from api_v2.component.serializers import ComponentMappingSerializer -from api_v2.config.utils import ConfigSchemaMixin from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet @@ -99,8 +98,16 @@ ActionHostGroupHostsViewSet, ActionHostGroupViewSet, ) +from api_v2.generic.config.api_schema import 
document_config_viewset +from api_v2.generic.config.audit import audit_config_viewset +from api_v2.generic.config.utils import ConfigSchemaMixin +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset -from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.audit import ( + audit_config_group_config_viewset, + audit_group_config_viewset, + audit_host_group_config_viewset, +) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer from api_v2.generic.imports.views import ImportViewSet @@ -756,6 +763,12 @@ class ClusterHostGroupConfigViewSet(HostGroupConfigViewSet): ... +@document_config_viewset(object_type="cluster config group", operation_id_variant="ClusterConfigGroup") +@audit_config_group_config_viewset(retrieve_owner=parent_cluster_from_lookup) +class ClusterConfigHostGroupViewSet(ConfigLogViewSet): + ... + + @document_action_viewset(object_type="cluster") @audit_action_viewset(retrieve_owner=parent_cluster_from_lookup) class ClusterActionViewSet(ActionViewSet): @@ -781,3 +794,9 @@ class ClusterActionHostGroupHostsViewSet(ActionHostGroupHostsViewSet): @document_action_host_group_actions_viewset(object_type="cluster") class ClusterActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): ... + + +@document_config_viewset(object_type="cluster") +@audit_config_viewset(type_in_name="Cluster", retrieve_owner=parent_cluster_from_lookup) +class ClusterConfigViewSet(ConfigLogViewSet): + ... 
diff --git a/python/api_v2/component/views.py b/python/api_v2/component/views.py index 7ff78e9fbd..adb22123a2 100644 --- a/python/api_v2/component/views.py +++ b/python/api_v2/component/views.py @@ -48,7 +48,6 @@ ComponentStatusSerializer, HostComponentSerializer, ) -from api_v2.config.utils import ConfigSchemaMixin from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet @@ -62,8 +61,16 @@ ActionHostGroupHostsViewSet, ActionHostGroupViewSet, ) +from api_v2.generic.config.api_schema import document_config_viewset +from api_v2.generic.config.audit import audit_config_viewset +from api_v2.generic.config.utils import ConfigSchemaMixin +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset -from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.audit import ( + audit_config_group_config_viewset, + audit_group_config_viewset, + audit_host_group_config_viewset, +) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.utils.audit import parent_component_from_lookup from api_v2.views import ( @@ -236,6 +243,12 @@ class ComponentHostGroupConfigViewSet(HostGroupConfigViewSet): ... +@document_config_viewset(object_type="component config group", operation_id_variant="ComponentConfigGroup") +@audit_config_group_config_viewset(retrieve_owner=parent_component_from_lookup) +class ComponentConfigHostGroupViewSet(ConfigLogViewSet): + ... 
+ + @document_action_viewset(object_type="component") @audit_action_viewset(retrieve_owner=parent_component_from_lookup) class ComponentActionViewSet(ActionViewSet): @@ -255,3 +268,9 @@ class ComponentActionHostGroupHostsViewSet(ActionHostGroupHostsViewSet): @document_action_host_group_actions_viewset(object_type="component") class ComponentActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): ... + + +@document_config_viewset(object_type="component") +@audit_config_viewset(type_in_name="Component", retrieve_owner=parent_component_from_lookup) +class ComponentConfigViewSet(ConfigLogViewSet): + ... diff --git a/python/api_v2/generic/action/utils.py b/python/api_v2/generic/action/utils.py index 7fed47138b..1c7ad40cd6 100644 --- a/python/api_v2/generic/action/utils.py +++ b/python/api_v2/generic/action/utils.py @@ -33,7 +33,7 @@ from django.conf import settings from rbac.models import User -from api_v2.config.utils import convert_attr_to_adcm_meta, get_config_schema +from api_v2.generic.config.utils import convert_attr_to_adcm_meta, get_config_schema def get_str_hash(value: str) -> str: diff --git a/python/api_v2/generic/action/views.py b/python/api_v2/generic/action/views.py index 44be06e3d5..4eddaf5a77 100644 --- a/python/api_v2/generic/action/views.py +++ b/python/api_v2/generic/action/views.py @@ -30,7 +30,6 @@ HTTP_200_OK, ) -from api_v2.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type from api_v2.generic.action.filters import ActionFilter from api_v2.generic.action.serializers import ( ActionListSerializer, @@ -44,6 +43,7 @@ insert_service_ids, unique_hc_entries, ) +from api_v2.generic.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type from api_v2.task.serializers import TaskListSerializer from api_v2.views import ADCMGenericViewSet diff --git a/python/api_v2/config/__init__.py b/python/api_v2/generic/config/__init__.py similarity index 100% rename from python/api_v2/config/__init__.py rename to 
python/api_v2/generic/config/__init__.py diff --git a/python/api_v2/generic/config/api_schema.py b/python/api_v2/generic/config/api_schema.py new file mode 100644 index 0000000000..8a8141e366 --- /dev/null +++ b/python/api_v2/generic/config/api_schema.py @@ -0,0 +1,44 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from drf_spectacular.utils import extend_schema, extend_schema_view +from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT + +from api_v2.api_schema import responses +from api_v2.generic.config.serializers import ConfigLogListSerializer, ConfigLogSerializer + + +def document_config_viewset(object_type: str, operation_id_variant: str | None = None): + capitalized_type = operation_id_variant or object_type.capitalize() + + return extend_schema_view( + list=extend_schema( + operation_id=f"get{capitalized_type}Configs", + summary=f"GET {object_type}'s config versions", + description=f"Get information about {object_type}'s config versions.", + responses=responses(success=ConfigLogListSerializer, errors=HTTP_404_NOT_FOUND), + ), + retrieve=extend_schema( + operation_id=f"get{capitalized_type}Config", + summary=f"GET {object_type}'s config", + description=f"Get {object_type}'s configuration information.", + responses=responses(success=ConfigLogSerializer, errors=HTTP_404_NOT_FOUND), + ), + create=extend_schema( + operation_id=f"post{capitalized_type}Configs", + summary=f"POST {object_type}'s configs", + 
description=f"Create a new version of {object_type}'s configuration.", + responses=responses( + success=ConfigLogSerializer, errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_409_CONFLICT) + ), + ), + ) diff --git a/python/api_v2/generic/config/audit.py b/python/api_v2/generic/config/audit.py new file mode 100644 index 0000000000..e48aa420fd --- /dev/null +++ b/python/api_v2/generic/config/audit.py @@ -0,0 +1,18 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from audit.alt.api import audit_update, audit_view +from audit.alt.core import RetrieveAuditObjectFunc + + +def audit_config_viewset(type_in_name: str, retrieve_owner: RetrieveAuditObjectFunc): + return audit_view(create=audit_update(name=f"{type_in_name} configuration updated", object_=retrieve_owner)) diff --git a/python/api_v2/config/serializers.py b/python/api_v2/generic/config/serializers.py similarity index 100% rename from python/api_v2/config/serializers.py rename to python/api_v2/generic/config/serializers.py diff --git a/python/api_v2/config/utils.py b/python/api_v2/generic/config/utils.py similarity index 100% rename from python/api_v2/config/utils.py rename to python/api_v2/generic/config/utils.py diff --git a/python/api_v2/config/views.py b/python/api_v2/generic/config/views.py similarity index 79% rename from python/api_v2/config/views.py rename to python/api_v2/generic/config/views.py index 31f577a815..a310a63f59 100644 --- a/python/api_v2/config/views.py +++ 
b/python/api_v2/generic/config/views.py @@ -12,29 +12,22 @@ from adcm.mixins import GetParentObjectMixin, ParentObject from adcm.permissions import VIEW_CONFIG_PERM, check_config_perm -from audit.utils import audit from cm.api import update_obj_config from cm.errors import AdcmEx from cm.models import ConfigLog, GroupConfig, PrototypeConfig from django.contrib.contenttypes.models import ContentType -from drf_spectacular.utils import extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin from rest_framework.exceptions import NotFound, PermissionDenied -from rest_framework.mixins import CreateModelMixin, ListModelMixin, RetrieveModelMixin +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import ( HTTP_200_OK, HTTP_201_CREATED, - HTTP_400_BAD_REQUEST, - HTTP_403_FORBIDDEN, - HTTP_404_NOT_FOUND, - HTTP_409_CONFLICT, ) -from api_v2.api_schema import ErrorSerializer -from api_v2.config.serializers import ConfigLogListSerializer, ConfigLogSerializer -from api_v2.config.utils import ( +from api_v2.generic.config.serializers import ConfigLogListSerializer, ConfigLogSerializer +from api_v2.generic.config.utils import ( convert_adcm_meta_to_attr, convert_attr_to_adcm_meta, represent_json_type_as_string, @@ -43,35 +36,9 @@ from api_v2.views import ADCMGenericViewSet -@extend_schema_view( - list=extend_schema( - operation_id="getObjectConfigs", - summary="GET object's config versions", - description="Get information about object's config versions.", - responses={HTTP_200_OK: ConfigLogListSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - retrieve=extend_schema( - operation_id="getObjectConfig", - summary="GET object's config", - description="Get object's configuration information.", - responses={HTTP_200_OK: ConfigLogSerializer, HTTP_404_NOT_FOUND: ErrorSerializer}, - ), - create=extend_schema( - 
operation_id="postObjectConfigs", - summary="POST object's configs", - description="Create a new version of object's configuration.", - responses={ - HTTP_200_OK: ConfigLogSerializer, - HTTP_400_BAD_REQUEST: ErrorSerializer, - HTTP_403_FORBIDDEN: ErrorSerializer, - HTTP_409_CONFLICT: ErrorSerializer, - }, - ), -) class ConfigLogViewSet( PermissionListMixin, ListModelMixin, - CreateModelMixin, RetrieveModelMixin, GetParentObjectMixin, ADCMGenericViewSet, @@ -103,7 +70,6 @@ def get_serializer_class(self): return ConfigLogSerializer - @audit def create(self, request, *args, **kwargs) -> Response: # noqa: ARG002 parent_object = self.get_parent_object() @@ -139,6 +105,10 @@ def retrieve(self, request, *args, **kwargs) -> Response: # noqa: ARG002 return Response(data=serializer.data, status=HTTP_200_OK) + def list(self, request, *args, **kwargs) -> Response: # noqa: ARG002 + self._check_parent_permissions() + return super().list(request, *args, **kwargs) + def _check_create_permissions(self, request: Request, parent_object: ParentObject) -> None: owner_object = parent_object.object if isinstance(parent_object, GroupConfig) else parent_object @@ -158,10 +128,6 @@ def _check_create_permissions(self, request: Request, parent_object: ParentObjec obj=owner_object, ) - def list(self, request, *args, **kwargs) -> Response: # noqa: ARG002 - self._check_parent_permissions() - return super().list(request, *args, **kwargs) - def _check_parent_permissions(self, parent_object: ParentObject = None): parent_obj = parent_object or self.get_parent_object() parent_view_perm = f"cm.view_{parent_obj.__class__.__name__.lower()}" diff --git a/python/api_v2/generic/group_config/audit.py b/python/api_v2/generic/group_config/audit.py index 8e9aea6fd9..6541b308bb 100644 --- a/python/api_v2/generic/group_config/audit.py +++ b/python/api_v2/generic/group_config/audit.py @@ -55,6 +55,14 @@ def audit_host_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): ) +def 
audit_config_group_config_viewset(retrieve_owner: RetrieveAuditObjectFunc): + return audit_view( + create=audit_update(name="{group_name} configuration group updated", object_=retrieve_owner).attach_hooks( + on_collect=(set_nested_group_name, adjust_denied_on_404_result(objects_exist=nested_group_config_exists)) + ) + ) + + # hooks @@ -97,6 +105,19 @@ def set_group_name( context.name = context.name.format(group_name=group_name or "").strip() +def set_nested_group_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + group_name = ( + GroupConfig.objects.values_list("name", flat=True).filter(id=call_arguments.get("group_config_pk")).first() + ) + + context.name = context.name.format(group_name=group_name or "").strip() + + def set_group_and_host_names( context: OperationAuditContext, call_arguments: AuditedCallArguments, diff --git a/python/api_v2/generic/group_config/urls_helpers.py b/python/api_v2/generic/group_config/urls_helpers.py index d517459da6..dd44796e86 100644 --- a/python/api_v2/generic/group_config/urls_helpers.py +++ b/python/api_v2/generic/group_config/urls_helpers.py @@ -13,7 +13,7 @@ from rest_framework.routers import SimpleRouter from rest_framework_nested.routers import NestedSimpleRouter -from api_v2.config.views import ConfigLogViewSet +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet CONFIG_PREFIX = "configs" @@ -23,6 +23,7 @@ def add_group_config_routers( group_config_viewset: type[GroupConfigViewSet], host_group_config_viewset: type[HostGroupConfigViewSet], + config_group_config_viewset: type[ConfigLogViewSet], parent_router: NestedSimpleRouter | SimpleRouter, parent_prefix: str, lookup: str, @@ -38,6 +39,8 @@ def add_group_config_routers( config_router = NestedSimpleRouter( parent_router=group_config_router, 
parent_prefix=CONFIG_GROUPS_PREFIX, lookup="group_config" ) - config_router.register(prefix=CONFIG_PREFIX, viewset=ConfigLogViewSet, basename=f"{lookup}-group-config-config") + config_router.register( + prefix=CONFIG_PREFIX, viewset=config_group_config_viewset, basename=f"{lookup}-group-config-config" + ) return group_config_router, hosts_router, config_router diff --git a/python/api_v2/generic/group_config/views.py b/python/api_v2/generic/group_config/views.py index 090039da94..40825160e9 100644 --- a/python/api_v2/generic/group_config/views.py +++ b/python/api_v2/generic/group_config/views.py @@ -30,7 +30,7 @@ HTTP_204_NO_CONTENT, ) -from api_v2.config.utils import ConfigSchemaMixin +from api_v2.generic.config.utils import ConfigSchemaMixin from api_v2.generic.group_config.permissions import GroupConfigHostsPermissions, GroupConfigPermissions from api_v2.generic.group_config.serializers import GroupConfigSerializer, HostGroupConfigSerializer from api_v2.host.filters import HostMemberFilter diff --git a/python/api_v2/host/urls.py b/python/api_v2/host/urls.py index b8b89dbf11..ebd834d1de 100644 --- a/python/api_v2/host/urls.py +++ b/python/api_v2/host/urls.py @@ -12,14 +12,13 @@ from rest_framework_nested.routers import NestedSimpleRouter, SimpleRouter -from api_v2.config.views import ConfigLogViewSet -from api_v2.host.views import HostActionViewSet, HostViewSet +from api_v2.host.views import HostActionViewSet, HostConfigViewSet, HostViewSet host_router = SimpleRouter() host_router.register(prefix="", viewset=HostViewSet) host_config_router = NestedSimpleRouter(parent_router=host_router, parent_prefix="", lookup="host") -host_config_router.register(prefix="configs", viewset=ConfigLogViewSet, basename="host-config") +host_config_router.register(prefix="configs", viewset=HostConfigViewSet, basename="host-config") host_action_router = NestedSimpleRouter(parent_router=host_router, parent_prefix="", lookup="host") host_action_router.register(prefix="actions", 
viewset=HostActionViewSet, basename="host-action") diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index a17addae61..b299e03872 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -42,10 +42,13 @@ ) from api_v2.api_schema import DefaultParams, ErrorSerializer -from api_v2.config.utils import ConfigSchemaMixin from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.config.api_schema import document_config_viewset +from api_v2.generic.config.audit import audit_config_viewset +from api_v2.generic.config.utils import ConfigSchemaMixin +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.host.filters import HostFilter from api_v2.host.permissions import ( HostsPermissions, @@ -264,3 +267,9 @@ def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # no @audit_action_viewset(retrieve_owner=parent_host_from_lookup) class HostActionViewSet(ActionViewSet): ... + + +@document_config_viewset(object_type="host") +@audit_config_viewset(type_in_name="Host", retrieve_owner=parent_host_from_lookup) +class HostConfigViewSet(ConfigLogViewSet): + ... 
diff --git a/python/api_v2/hostprovider/urls.py b/python/api_v2/hostprovider/urls.py index 85a87fb171..d0af027d0e 100644 --- a/python/api_v2/hostprovider/urls.py +++ b/python/api_v2/hostprovider/urls.py @@ -16,10 +16,11 @@ from rest_framework.routers import SimpleRouter from rest_framework_nested.routers import NestedSimpleRouter -from api_v2.config.views import ConfigLogViewSet from api_v2.generic.group_config.urls_helpers import add_group_config_routers from api_v2.hostprovider.views import ( HostProviderActionViewSet, + HostProviderConfigHostGroupViewSet, + HostProviderConfigViewSet, HostProviderGroupConfigViewSet, HostProviderHostGroupConfigViewSet, HostProviderViewSet, @@ -40,7 +41,7 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st action_router.register(prefix="actions", viewset=HostProviderActionViewSet, basename="provider-action") config_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") -config_router.register(prefix="configs", viewset=ConfigLogViewSet, basename="provider-config") +config_router.register(prefix="configs", viewset=HostProviderConfigViewSet, basename="provider-config") upgrade_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") upgrade_router.register(prefix="upgrades", viewset=UpgradeViewSet) @@ -49,6 +50,7 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st group_config_routers = add_group_config_routers( group_config_viewset=HostProviderGroupConfigViewSet, host_group_config_viewset=HostProviderHostGroupConfigViewSet, + config_group_config_viewset=HostProviderConfigHostGroupViewSet, parent_router=router, parent_prefix="", lookup="hostprovider", diff --git a/python/api_v2/hostprovider/views.py b/python/api_v2/hostprovider/views.py index e520676518..e5e57dbb89 100644 --- a/python/api_v2/hostprovider/views.py +++ b/python/api_v2/hostprovider/views.py @@ -24,12 +24,19 @@ from rest_framework.status 
import HTTP_201_CREATED, HTTP_204_NO_CONTENT from api_v2.api_schema import ErrorSerializer -from api_v2.config.utils import ConfigSchemaMixin from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet +from api_v2.generic.config.api_schema import document_config_viewset +from api_v2.generic.config.audit import audit_config_viewset +from api_v2.generic.config.utils import ConfigSchemaMixin +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset -from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.audit import ( + audit_config_group_config_viewset, + audit_group_config_viewset, + audit_host_group_config_viewset, +) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.hostprovider.filters import HostProviderFilter from api_v2.hostprovider.permissions import HostProviderPermissions @@ -168,7 +175,19 @@ class HostProviderHostGroupConfigViewSet(HostGroupConfigViewSet): ... +@document_config_viewset(object_type="hostprovider config group", operation_id_variant="HostProviderConfigGroup") +@audit_config_group_config_viewset(retrieve_owner=parent_hostprovider_from_lookup) +class HostProviderConfigHostGroupViewSet(ConfigLogViewSet): + ... + + @document_action_viewset(object_type="hostprovider") @audit_action_viewset(retrieve_owner=parent_hostprovider_from_lookup) class HostProviderActionViewSet(ActionViewSet): ... + + +@document_config_viewset(object_type="hostprovider") +@audit_config_viewset(type_in_name="Provider", retrieve_owner=parent_hostprovider_from_lookup) +class HostProviderConfigViewSet(ConfigLogViewSet): + ... 
diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index 1821493946..7666e83ea2 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -50,7 +50,6 @@ ) from api_v2.api_schema import DefaultParams, responses -from api_v2.config.utils import ConfigSchemaMixin from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet @@ -64,8 +63,16 @@ ActionHostGroupHostsViewSet, ActionHostGroupViewSet, ) +from api_v2.generic.config.api_schema import document_config_viewset +from api_v2.generic.config.audit import audit_config_viewset +from api_v2.generic.config.utils import ConfigSchemaMixin +from api_v2.generic.config.views import ConfigLogViewSet from api_v2.generic.group_config.api_schema import document_group_config_viewset, document_host_group_config_viewset -from api_v2.generic.group_config.audit import audit_group_config_viewset, audit_host_group_config_viewset +from api_v2.generic.group_config.audit import ( + audit_config_group_config_viewset, + audit_group_config_viewset, + audit_host_group_config_viewset, +) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer from api_v2.generic.imports.views import ImportViewSet @@ -294,6 +301,12 @@ class ServiceHostGroupConfigViewSet(HostGroupConfigViewSet): ... +@document_config_viewset(object_type="service config group", operation_id_variant="ServiceConfigGroup") +@audit_config_group_config_viewset(retrieve_owner=parent_service_from_lookup) +class ServiceConfigHostGroupViewSet(ConfigLogViewSet): + ... 
+ + @document_action_viewset(object_type="service") @audit_action_viewset(retrieve_owner=parent_service_from_lookup) class ServiceActionViewSet(ActionViewSet): @@ -313,3 +326,9 @@ class ServiceActionHostGroupHostsViewSet(ActionHostGroupHostsViewSet): @document_action_host_group_actions_viewset(object_type="service") class ServiceActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): ... + + +@document_config_viewset(object_type="service") +@audit_config_viewset(type_in_name="Service", retrieve_owner=parent_service_from_lookup) +class ServiceConfigViewSet(ConfigLogViewSet): + ... diff --git a/python/api_v2/tests/test_cluster.py b/python/api_v2/tests/test_cluster.py index 63a22aaa4b..fb18bf17e7 100644 --- a/python/api_v2/tests/test_cluster.py +++ b/python/api_v2/tests/test_cluster.py @@ -38,7 +38,7 @@ HTTP_409_CONFLICT, ) -from api_v2.config.utils import convert_adcm_meta_to_attr +from api_v2.generic.config.utils import convert_adcm_meta_to_attr from api_v2.tests.base import BaseAPITestCase diff --git a/python/api_v2/tests/test_config.py b/python/api_v2/tests/test_config.py index e57a1b0845..7530825727 100644 --- a/python/api_v2/tests/test_config.py +++ b/python/api_v2/tests/test_config.py @@ -41,7 +41,7 @@ HTTP_409_CONFLICT, ) -from api_v2.config.utils import convert_adcm_meta_to_attr, convert_attr_to_adcm_meta +from api_v2.generic.config.utils import convert_adcm_meta_to_attr, convert_attr_to_adcm_meta from api_v2.tests.base import BaseAPITestCase CONFIGS = "configs" diff --git a/python/api_v2/upgrade/views.py b/python/api_v2/upgrade/views.py index 7fc7906b9a..41e35e40f9 100644 --- a/python/api_v2/upgrade/views.py +++ b/python/api_v2/upgrade/views.py @@ -33,9 +33,9 @@ from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_204_NO_CONTENT -from api_v2.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type from api_v2.generic.action.serializers import ActionRunSerializer from 
api_v2.generic.action.utils import get_action_configuration, insert_service_ids, unique_hc_entries +from api_v2.generic.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type from api_v2.task.serializers import TaskListSerializer from api_v2.upgrade.serializers import UpgradeListSerializer, UpgradeRetrieveSerializer from api_v2.views import ADCMGenericViewSet From f22150c3d6b06e3338c4d30ebac5be0adea71024 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Mon, 1 Jul 2024 11:32:59 +0000 Subject: [PATCH 06/98] ADCM-5697: Rework audit for User views from RBAC --- .../api_v2/generic/action_host_group/audit.py | 10 +-- python/api_v2/rbac/user/permissions.py | 2 - python/api_v2/rbac/user/views.py | 47 +++++++++-- python/api_v2/tests/test_audit/test_user.py | 4 +- python/api_v2/utils/audit.py | 84 +++++++++++++------ python/audit/alt/api.py | 11 ++- python/audit/alt/core.py | 26 +++++- python/audit/alt/hooks.py | 57 +++++++++++-- 8 files changed, 190 insertions(+), 51 deletions(-) diff --git a/python/api_v2/generic/action_host_group/audit.py b/python/api_v2/generic/action_host_group/audit.py index 03e587c93f..ced22dabf1 100644 --- a/python/api_v2/generic/action_host_group/audit.py +++ b/python/api_v2/generic/action_host_group/audit.py @@ -14,19 +14,19 @@ from functools import partial import json -from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result +from audit.alt.core import AuditedCallArguments, IDBasedAuditObjectCreator, OperationAuditContext, Result from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObjectType from cm.models import ActionHostGroup, Cluster, ClusterObject, Host, ServiceComponent -from api_v2.utils.audit import CMAuditObjectCreator, ExtractID, object_does_exist +from api_v2.utils.audit import ExtractID, object_does_exist # hooks -class ActionHostGroupAuditObjectCreator(CMAuditObjectCreator): - cm_model = ActionHostGroup 
+class ActionHostGroupAuditObjectCreator(IDBasedAuditObjectCreator): + model = ActionHostGroup name_field = "prototype__display_name" def get_name(self, id_: str | int) -> str | None: @@ -66,7 +66,7 @@ def get_name(self, id_: str | int) -> str | None: _extract_action_host_group = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.ACTION_HOST_GROUP, - create_new=ActionHostGroupAuditObjectCreator(cm_model=ActionHostGroup), + create_new=ActionHostGroupAuditObjectCreator(model=ActionHostGroup), ) action_host_group_from_lookup = _extract_action_host_group(extract_id=ExtractID(field="pk").from_lookup_kwargs) parent_action_host_group_from_lookup = _extract_action_host_group( diff --git a/python/api_v2/rbac/user/permissions.py b/python/api_v2/rbac/user/permissions.py index a691b01171..7a7e873515 100644 --- a/python/api_v2/rbac/user/permissions.py +++ b/python/api_v2/rbac/user/permissions.py @@ -11,7 +11,6 @@ # limitations under the License. from adcm.permissions import VIEW_USER_PERMISSION -from audit.utils import audit from rest_framework.exceptions import NotFound from rest_framework.permissions import DjangoModelPermissions @@ -27,7 +26,6 @@ class UserPermissions(DjangoModelPermissions): "DELETE": ["%(app_label)s.delete_%(model_name)s"], } - @audit def has_permission(self, request, view): if view.action not in ("create", "list") and not request.user.has_perm(VIEW_USER_PERMISSION): raise NotFound() diff --git a/python/api_v2/rbac/user/views.py b/python/api_v2/rbac/user/views.py index 6702159f86..39728f71e4 100644 --- a/python/api_v2/rbac/user/views.py +++ b/python/api_v2/rbac/user/views.py @@ -13,7 +13,13 @@ from typing import Any from adcm.permissions import VIEW_USER_PERMISSION -from audit.utils import audit +from audit.alt.api import audit_create, audit_delete, audit_update +from audit.alt.hooks import ( + extract_current_from_response, + extract_from_object, + extract_previous_from_object, + only_on_success, +) from cm.errors import AdcmEx from 
core.errors import NotFoundError from core.rbac.dto import UserCreateDTO, UserUpdateDTO @@ -61,6 +67,13 @@ UserSerializer, UserUpdateSerializer, ) +from api_v2.utils.audit import ( + retrieve_user_password_groups, + set_username_for_block_actions, + update_user_name, + user_from_lookup, + user_from_response, +) from api_v2.views import ADCMGenericViewSet @@ -177,7 +190,7 @@ def get_serializer_class(self) -> type[UserSerializer] | type[UserUpdateSerializ return UserSerializer - @audit + @audit_create(name="User created", object_=user_from_response) def create(self, request: Request, *_, **__) -> Response: serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -205,7 +218,25 @@ def create(self, request: Request, *_, **__) -> Response: return Response(data=UserSerializer(instance=self.get_queryset().get(id=user_id)).data, status=HTTP_201_CREATED) - @audit + @( + audit_update(name="User updated", object_=user_from_lookup) + .attach_hooks(on_collect=only_on_success(update_user_name)) + .track_changes( + before=( + extract_previous_from_object(User, "first_name", "last_name", "email", "password", "is_superuser"), + extract_from_object(func=retrieve_user_password_groups, section="previous"), + ), + after=( + extract_current_from_response( + "first_name", + "last_name", + "email", + is_superuser="is_super_user", + ), + extract_from_object(func=retrieve_user_password_groups, section="current"), + ), + ) + ) def partial_update(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -259,7 +290,9 @@ def partial_update(self, request: Request, *args, **kwargs) -> Response: # noqa return Response(data=UserSerializer(instance=self.get_queryset().get(id=user_id)).data, status=HTTP_200_OK) - @audit + @audit_update(name="{username} user blocked", object_=user_from_lookup).attach_hooks( + pre_call=set_username_for_block_actions + ) 
@action(methods=["post"], detail=True) def block(self, request: Request, *_, **kwargs: Any) -> Response: # to check existence @@ -285,7 +318,9 @@ def block(self, request: Request, *_, **kwargs: Any) -> Response: return Response(status=HTTP_200_OK) - @audit + @audit_update(name="{username} user unblocked", object_=user_from_lookup).attach_hooks( + pre_call=set_username_for_block_actions + ) @action(methods=["post"], detail=True) def unblock(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG001, ARG002 # to check existence @@ -302,7 +337,7 @@ def unblock(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG00 return Response(status=HTTP_200_OK) - @audit + @audit_delete(name="User deleted", object_=user_from_lookup, removed_on_success=True) def destroy(self, request: Request, *args, **kwargs) -> Response: user = self.get_object() if user.built_in: diff --git a/python/api_v2/tests/test_audit/test_user.py b/python/api_v2/tests/test_audit/test_user.py index 2298155a02..294ff1a811 100644 --- a/python/api_v2/tests/test_audit/test_user.py +++ b/python/api_v2/tests/test_audit/test_user.py @@ -110,7 +110,7 @@ def test_user_update_all_fields_success(self): "first_name": "new_first_name", "lastName": "new_last_name", "email": "email@new.mail", - "is_superuser": True, + "isSuperUser": True, "password": "new_password1", "groups": [self.group.pk], } @@ -126,6 +126,7 @@ def test_user_update_all_fields_success(self): "last_name": "new_last_name", "password": "******", "group": ["Some group [local]"], + "is_superuser": True, }, "previous": { "email": "", @@ -133,6 +134,7 @@ def test_user_update_all_fields_success(self): "last_name": "", "password": "******", "group": [], + "is_superuser": False, }, } diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 0e70199ebf..605ab66847 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -15,12 +15,13 @@ from functools import partial import json -from audit.alt.core 
import AuditedCallArguments, OperationAuditContext, Result +from audit.alt.core import AuditedCallArguments, IDBasedAuditObjectCreator, OperationAuditContext, Result from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType from cm.models import ADCM, Cluster, ClusterObject, Host, HostProvider, ServiceComponent from django.db.models import Model +from rbac.models import User from rest_framework.response import Response # object retrievers @@ -41,24 +42,8 @@ def from_lookup_kwargs(self, call_arguments: AuditedCallArguments, result: Respo @dataclass(slots=True) -class CMAuditObjectCreator: - cm_model: type[Model] - name_field: str = "name" - - def __call__(self, id_: str | int, audit_object_type: AuditObjectType) -> AuditObject | None: - name = self.get_name(id_=id_) - if not name: - return None - - return AuditObject.objects.create(object_id=id_, object_type=audit_object_type, object_name=name) - - def get_name(self, id_: str | int) -> str | None: - return self.cm_model.objects.values_list(self.name_field, flat=True).filter(id=id_).first() - - -@dataclass(slots=True) -class ServiceAuditObjectCreator(CMAuditObjectCreator): - cm_model = ClusterObject +class ServiceAuditObjectCreator(IDBasedAuditObjectCreator): + model = ClusterObject name_field = "prototype__display_name" def get_name(self, id_: str | int) -> str | None: @@ -70,8 +55,8 @@ def get_name(self, id_: str | int) -> str | None: @dataclass(slots=True) -class ComponentAuditObjectCreator(CMAuditObjectCreator): - cm_model = ServiceComponent +class ComponentAuditObjectCreator(IDBasedAuditObjectCreator): + model = ServiceComponent name_field = "prototype__display_name" def get_name(self, id_: str | int) -> str | None: @@ -88,8 +73,10 @@ def get_name(self, id_: str | int) -> str | None: return "/".join(names) -create_audit_cluster_object = CMAuditObjectCreator(cm_model=Cluster) -create_audit_host_object = 
CMAuditObjectCreator(cm_model=Host, name_field="fqdn") +create_audit_cluster_object = IDBasedAuditObjectCreator(model=Cluster) +create_audit_host_object = IDBasedAuditObjectCreator(model=Host, name_field="fqdn") +create_audit_user_object = IDBasedAuditObjectCreator(model=User, name_field="username") + _extract_cluster_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.CLUSTER, create_new=create_audit_cluster_object @@ -101,21 +88,21 @@ def get_name(self, id_: str | int) -> str | None: _extract_service_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.SERVICE, - create_new=ServiceAuditObjectCreator(cm_model=ClusterObject), + create_new=ServiceAuditObjectCreator(model=ClusterObject), ) parent_service_from_lookup = _extract_service_from(extract_id=ExtractID(field="service_pk").from_lookup_kwargs) _extract_component_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.COMPONENT, - create_new=ComponentAuditObjectCreator(cm_model=ServiceComponent), + create_new=ComponentAuditObjectCreator(model=ServiceComponent), ) parent_component_from_lookup = _extract_component_from(extract_id=ExtractID(field="component_pk").from_lookup_kwargs) _extract_hostprovider_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.PROVIDER, - create_new=CMAuditObjectCreator(cm_model=HostProvider), + create_new=IDBasedAuditObjectCreator(model=HostProvider), ) parent_hostprovider_from_lookup = _extract_hostprovider_from( extract_id=ExtractID(field="hostprovider_pk").from_lookup_kwargs @@ -127,6 +114,12 @@ def get_name(self, id_: str | int) -> str | None: host_from_lookup = _extract_host_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) parent_host_from_lookup = _extract_host_from(extract_id=ExtractID(field="host_pk").from_lookup_kwargs) +_extract_user_from = partial( + GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.USER, create_new=create_audit_user_object +) 
+user_from_response = _extract_user_from(extract_id=ExtractID(field="id").from_response) +user_from_lookup = _extract_user_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) + def adcm_audit_object( context: "OperationAuditContext", # noqa: ARG001 @@ -170,6 +163,30 @@ def update_cluster_name( instance.save(update_fields=["object_name"]) +def update_user_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> None: + _ = call_arguments, result, exception + + if not context.object: + return + + instance = context.object + + new_name = User.objects.values_list("username", flat=True).filter(id=instance.object_id).first() + if not new_name: + return + + if instance.object_name == new_name: + return + + instance.object_name = new_name + instance.save(update_fields=["object_name"]) + + # hook helpers / special functions @@ -186,6 +203,13 @@ def nested_host_does_exist(hook: AuditHook) -> bool: return object_does_exist(hook=hook, model=Host) +def retrieve_user_password_groups(id_: int) -> dict: + if (user := User.objects.filter(pk=id_).first()) is None: + return {} + + return {"password": user.password, "group": list(user.groups.values_list("name", flat=True).order_by("name"))} + + # name changers @@ -220,3 +244,11 @@ def __call__(self): fqdn = Host.objects.values_list("fqdn", flat=True).filter(id=host_id).first() or "" self.context.name = f"{fqdn} host removed".strip() + + +class set_username_for_block_actions(AuditHook): # noqa: N801 + def __call__(self): + user_id = self.call_arguments.get("pk") + username = User.objects.values_list("username", flat=True).filter(id=user_id).first() or "" + + self.context.name = self.context.name.format(username=username).strip() diff --git a/python/audit/alt/api.py b/python/audit/alt/api.py index 451b22ece7..5e213ebda9 100644 --- a/python/audit/alt/api.py +++ b/python/audit/alt/api.py @@ -136,11 +136,16 @@ class 
audit_create(TypedAuditDecorator): # noqa: N801 class audit_update(TypedAuditDecorator): # noqa: N801 OPERATION_TYPE = AuditLogOperationType.UPDATE - def track_changes(self, before: AuditHookFunc, after: AuditHookFunc) -> Self: + def track_changes( + self, before: AuditHookFunc | Iterable[AuditHookFunc], after: AuditHookFunc | Iterable[AuditHookFunc] + ) -> Self: """Shouldn't be called more than 1 time, isn't adopted for that""" - self.extra_pre_call_hooks.append(before) - self.extra_on_collect_hooks.append(only_on_success(after)) + self.extra_pre_call_hooks.extend(before if not callable(before) else (before,)) + self.extra_on_collect_hooks.extend( + (only_on_success(hook) for hook in after) if not callable(after) else (only_on_success(after),) + ) + self.extra_on_collect_hooks.append(cleanup_changes) return self diff --git a/python/audit/alt/core.py b/python/audit/alt/core.py index a5c3e762ed..1772173f01 100644 --- a/python/audit/alt/core.py +++ b/python/audit/alt/core.py @@ -13,10 +13,18 @@ from collections import UserDict from dataclasses import dataclass, field +from django.db.models import Model from typing_extensions import Protocol, Self, TypeVar from audit.cef_logger import cef_logger as write_cef_log -from audit.models import AuditLog, AuditLogOperationResult, AuditLogOperationType, AuditObject, AuditUser +from audit.models import ( + AuditLog, + AuditLogOperationResult, + AuditLogOperationType, + AuditObject, + AuditObjectType, + AuditUser, +) Result = TypeVar("Result") @@ -198,3 +206,19 @@ def save(self) -> None: agent=self.meta.agent, ) write_cef_log(audit_instance=record, signature_id=self._signature.id) + + +@dataclass(slots=True) +class IDBasedAuditObjectCreator: + model: type[Model] + name_field: str = "name" + + def __call__(self, id_: str | int, audit_object_type: AuditObjectType) -> AuditObject | None: + name = self.get_name(id_=id_) + if not name: + return None + + return AuditObject.objects.create(object_id=id_, object_type=audit_object_type, 
object_name=name) + + def get_name(self, id_: str | int) -> str | None: + return self.model.objects.values_list(self.name_field, flat=True).filter(id=id_).first() diff --git a/python/audit/alt/hooks.py b/python/audit/alt/hooks.py index c1232e8547..d23e6774ad 100644 --- a/python/audit/alt/hooks.py +++ b/python/audit/alt/hooks.py @@ -12,7 +12,7 @@ from collections import deque from functools import wraps -from typing import Callable +from typing import Callable, Literal, Protocol from django.contrib.auth.models import User as DjangoUser from django.core.handlers.wsgi import WSGIRequest @@ -25,6 +25,11 @@ from audit.utils import get_client_agent, get_client_ip +class HookObjectLookupFunc(Protocol): + def __call__(self, id_: int) -> dict: + ... + + class AuditHook: """ Convenience hook implementation to avoid specifying arguments each time. @@ -106,6 +111,18 @@ def wrapped( # basic hooks and hook builders +SECRET_FIELDS = ("password",) +SECRET_VALUE = "******" + + +def _hide_secrets(data: dict) -> dict: + for field in data: + if field in SECRET_FIELDS: + data[field] = SECRET_VALUE + + return data + + class cleanup_changes(AuditHook): """ Clean up object changes stored in meta. 
@@ -141,7 +158,7 @@ def __call__(self): current.pop(key) if current and previous: - self.context.meta.changes |= {"previous": previous, "current": current} + self.context.meta.changes |= {"previous": _hide_secrets(previous), "current": _hide_secrets(current)} class detect_request_user(AuditHook): @@ -215,20 +232,46 @@ def __call__(self): if not id_: return - self.context.meta.changes["previous"] = ( - model.objects.values(*fields, **named_fields).filter(id=id_).first() or {} - ) + model_data = model.objects.values(*fields, **named_fields).filter(id=id_).first() or {} + self.context.meta.changes["previous"] = model_data return HookImpl -def extract_current_from_response(*fields: str) -> AuditHookFunc: +def extract_current_from_response(*fields: str, **named_fields: str) -> AuditHookFunc: class HookImpl(AuditHook): def __call__(self): if not isinstance(self.result, Response): return + fields_ = (*tuple(zip(fields, fields)), *named_fields.items()) + data = self.result.data - self.context.meta.changes["current"] = {field: data[field] for field in fields if field in data} + response_data = {audit_field: data[data_field] for audit_field, data_field in fields_ if data_field in data} + + self.context.meta.changes["current"] = response_data + + return HookImpl + + +def extract_from_object( + func: HookObjectLookupFunc, section: Literal["current", "previous"], id_arg: str = "pk", id_field: str = "id" +): + """ + Hook for cases when field is absent in response, + or it is easier to get and format data with custom func than using orm lookups. 
+ Updates (or creates) self.context.meta.changes's `section` with `func` return value + """ + + class HookImpl(AuditHook): + def __call__(self): + id_ = self.call_arguments.get(id_arg) if section == "previous" else self.result.data.get(id_field) + if id_ is None: + return + + if section not in self.context.meta.changes: + self.context.meta.changes[section] = {} + + self.context.meta.changes[section] |= func(id_=id_) return HookImpl From aca46248a2812d9f8db0da6d2f47b72e0a5975e4 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Fri, 28 Jun 2024 18:45:59 +0300 Subject: [PATCH 07/98] ADCM-5701 Specialize Bundle ViewSets --- python/api_v2/bundle/views.py | 12 +++++++----- python/api_v2/utils/audit.py | 9 ++++++++- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/python/api_v2/bundle/views.py b/python/api_v2/bundle/views.py index 5b4c892468..2fcd54e158 100644 --- a/python/api_v2/bundle/views.py +++ b/python/api_v2/bundle/views.py @@ -10,8 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from adcm.permissions import DjangoModelPermissionsAudit -from audit.utils import audit +from audit.alt.api import audit_create, audit_delete +from audit.alt.object_retrievers import ignore_object_search from cm.bundle import delete_bundle, load_bundle, upload_file from cm.models import Bundle, ObjectType from django.db.models import F @@ -23,12 +23,14 @@ ListModelMixin, RetrieveModelMixin, ) +from rest_framework.permissions import DjangoModelPermissions from rest_framework.response import Response from rest_framework.status import HTTP_201_CREATED, HTTP_204_NO_CONTENT from api_v2.api_schema import ErrorSerializer from api_v2.bundle.filters import BundleFilter from api_v2.bundle.serializers import BundleSerializer, UploadBundleSerializer +from api_v2.utils.audit import bundle_from_lookup from api_v2.views import ADCMGenericViewSet @@ -59,7 +61,7 @@ class BundleViewSet(ListModelMixin, RetrieveModelMixin, DestroyModelMixin, Creat .filter(type__in=[ObjectType.CLUSTER, ObjectType.PROVIDER]) .order_by(F("prototype__display_name").asc()) ) - permission_classes = [DjangoModelPermissionsAudit] + permission_classes = [DjangoModelPermissions] filterset_class = BundleFilter filter_backends = (DjangoFilterBackend,) @@ -80,7 +82,7 @@ def get_serializer_class(self): 409: ErrorSerializer, }, ) - @audit + @audit_create(name="Bundle uploaded", object_=ignore_object_search) def create(self, request, *args, **kwargs) -> Response: # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -96,7 +98,7 @@ def create(self, request, *args, **kwargs) -> Response: # noqa: ARG002 description="Delete a specific ADCM bundle.", responses={204: None, 403: ErrorSerializer, 404: ErrorSerializer, 409: ErrorSerializer}, ) - @audit + @audit_delete(name="Bundle deleted", object_=bundle_from_lookup, removed_on_success=True) def destroy(self, request, *args, **kwargs) -> Response: # noqa: ARG002 bundle = self.get_object() delete_bundle(bundle=bundle) 
diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 605ab66847..3f639f0d7b 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -19,7 +19,7 @@ from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType -from cm.models import ADCM, Cluster, ClusterObject, Host, HostProvider, ServiceComponent +from cm.models import ADCM, Bundle, Cluster, ClusterObject, Host, HostProvider, ServiceComponent from django.db.models import Model from rbac.models import User from rest_framework.response import Response @@ -252,3 +252,10 @@ def __call__(self): username = User.objects.values_list("username", flat=True).filter(id=user_id).first() or "" self.context.name = self.context.name.format(username=username).strip() + + +bundle_from_lookup = GeneralAuditObjectRetriever( + audit_object_type=AuditObjectType.BUNDLE, + create_new=IDBasedAuditObjectCreator(model=Bundle), + extract_id=ExtractID(field="pk").from_lookup_kwargs, +) From 4aed0f548d3653366e325834dd4ee265df96db44 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Mon, 1 Jul 2024 12:48:27 +0000 Subject: [PATCH 08/98] ADCM-5698: Rework audit for Group views from RBAC --- python/api_v2/rbac/group/permissions.py | 29 +++++++++++++++++ python/api_v2/rbac/group/views.py | 43 ++++++++++++++++++------- python/api_v2/utils/audit.py | 41 +++++++++++++++++++++-- 3 files changed, 99 insertions(+), 14 deletions(-) create mode 100644 python/api_v2/rbac/group/permissions.py diff --git a/python/api_v2/rbac/group/permissions.py b/python/api_v2/rbac/group/permissions.py new file mode 100644 index 0000000000..e7e427a04c --- /dev/null +++ b/python/api_v2/rbac/group/permissions.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from adcm.permissions import VIEW_GROUP_PERMISSION +from rest_framework.exceptions import NotFound +from rest_framework.permissions import DjangoModelPermissions + + +class GroupPermissions(DjangoModelPermissions): + method_permissions_map = { + "patch": [(VIEW_GROUP_PERMISSION, NotFound)], + "delete": [(VIEW_GROUP_PERMISSION, NotFound)], + } + + def has_permission(self, request, view): + for permission, error in self.method_permissions_map.get(request.method.lower(), []): + if not request.user.has_perm(perm=permission): + raise error + + return super().has_permission(request, view) diff --git a/python/api_v2/rbac/group/views.py b/python/api_v2/rbac/group/views.py index 2d2c3cba0b..ca98a93200 100644 --- a/python/api_v2/rbac/group/views.py +++ b/python/api_v2/rbac/group/views.py @@ -10,8 +10,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from adcm.permissions import VIEW_GROUP_PERMISSION, CustomModelPermissionsByMethod -from audit.utils import audit +from adcm.permissions import VIEW_GROUP_PERMISSION +from audit.alt.api import audit_create, audit_delete, audit_update +from audit.alt.hooks import ( + extract_current_from_response, + extract_from_object, + extract_previous_from_object, + only_on_success, +) from cm.errors import AdcmEx from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin @@ -19,7 +25,6 @@ from rbac.services.group import create as create_group from rbac.services.group import update as update_group from rbac.utils import Empty -from rest_framework.exceptions import NotFound from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.request import Request from rest_framework.response import Response @@ -35,11 +40,18 @@ from api_v2.api_schema import DefaultParams, ErrorSerializer from api_v2.rbac.group.filters import GroupFilter +from api_v2.rbac.group.permissions import GroupPermissions from api_v2.rbac.group.serializers import ( GroupCreateSerializer, GroupSerializer, GroupUpdateSerializer, ) +from api_v2.utils.audit import ( + group_from_lookup, + group_from_response, + retrieve_group_name_users, + update_group_name, +) from api_v2.views import ADCMGenericViewSet @@ -109,11 +121,7 @@ class GroupViewSet(PermissionListMixin, RetrieveModelMixin, ListModelMixin, DestroyModelMixin, ADCMGenericViewSet): queryset = Group.objects.order_by("display_name").prefetch_related("user_set") filterset_class = GroupFilter - permission_classes = (CustomModelPermissionsByMethod,) - method_permissions_map = { - "patch": [(VIEW_GROUP_PERMISSION, NotFound)], - "delete": [(VIEW_GROUP_PERMISSION, NotFound)], - } + permission_classes = (GroupPermissions,) permission_required = [VIEW_GROUP_PERMISSION] def get_serializer_class(self) -> type[GroupSerializer | GroupCreateSerializer | 
GroupUpdateSerializer]: @@ -125,7 +133,7 @@ def get_serializer_class(self) -> type[GroupSerializer | GroupCreateSerializer | return GroupSerializer - @audit + @audit_create(name="Group created", object_=group_from_response) def create(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -139,7 +147,20 @@ def create(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 return Response(data=GroupSerializer(instance=group).data, status=HTTP_201_CREATED) - @audit + @( + audit_update(name="Group updated", object_=group_from_lookup) + .attach_hooks(on_collect=only_on_success(update_group_name)) + .track_changes( + before=( + extract_previous_from_object(Group, "description"), + extract_from_object(func=retrieve_group_name_users, section="previous"), + ), + after=( + extract_current_from_response("description"), + extract_from_object(func=retrieve_group_name_users, section="current"), + ), + ) + ) def partial_update(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -158,7 +179,7 @@ def partial_update(self, request: Request, *args, **kwargs) -> Response: # noqa return Response(data=GroupSerializer(instance=group).data, status=HTTP_200_OK) - @audit + @audit_delete(name="Group deleted", object_=group_from_lookup, removed_on_success=True) def destroy(self, request: Request, *args, **kwargs) -> Response: instance: Group = self.get_object() diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 605ab66847..8d4635dcee 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -21,7 +21,7 @@ from audit.models import AuditObject, AuditObjectType from cm.models import ADCM, Cluster, ClusterObject, Host, HostProvider, ServiceComponent from django.db.models import Model -from rbac.models import User 
+from rbac.models import Group, User from rest_framework.response import Response # object retrievers @@ -76,6 +76,7 @@ def get_name(self, id_: str | int) -> str | None: create_audit_cluster_object = IDBasedAuditObjectCreator(model=Cluster) create_audit_host_object = IDBasedAuditObjectCreator(model=Host, name_field="fqdn") create_audit_user_object = IDBasedAuditObjectCreator(model=User, name_field="username") +create_audit_group_object = IDBasedAuditObjectCreator(model=Group) _extract_cluster_from = partial( @@ -120,6 +121,12 @@ def get_name(self, id_: str | int) -> str | None: user_from_response = _extract_user_from(extract_id=ExtractID(field="id").from_response) user_from_lookup = _extract_user_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +_extract_group_from = partial( + GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.GROUP, create_new=create_audit_group_object +) +group_from_response = _extract_group_from(extract_id=ExtractID(field="id").from_response) +group_from_lookup = _extract_group_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) + def adcm_audit_object( context: "OperationAuditContext", # noqa: ARG001 @@ -177,10 +184,28 @@ def update_user_name( instance = context.object new_name = User.objects.values_list("username", flat=True).filter(id=instance.object_id).first() - if not new_name: + if not new_name or instance.object_name == new_name: return - if instance.object_name == new_name: + instance.object_name = new_name + instance.save(update_fields=["object_name"]) + + +def update_group_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> None: + _ = call_arguments, result, exception + + if not context.object: + return + + instance = context.object + + new_name = Group.objects.values_list("name", flat=True).filter(id=instance.object_id).first() + if not new_name or instance.object_name == new_name: return instance.object_name = 
new_name @@ -210,6 +235,16 @@ def retrieve_user_password_groups(id_: int) -> dict: return {"password": user.password, "group": list(user.groups.values_list("name", flat=True).order_by("name"))} +def retrieve_group_name_users(id_: int) -> dict: + if (group := Group.objects.prefetch_related("user_set").filter(pk=id_).first()) is None: + return {} + + return { + "name": group.display_name, + "user": sorted(user.username for user in group.user_set.all()), + } + + # name changers From 3ae420878cf1cdbb30805a50a47c811d86684d42 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 1 Jul 2024 12:48:52 +0000 Subject: [PATCH 09/98] ADCM-5735 Rework audit for background tasks / task finish --- python/audit/alt/background.py | 82 ++++++++++++++++ .../audit/management/commands/clearaudit.py | 52 +++++----- python/audit/utils.py | 96 ------------------- .../management/commands/collect_statistics.py | 5 +- python/cm/management/commands/logrotate.py | 73 +++++++++----- .../cm/management/commands/run_ldap_sync.py | 32 +++---- python/cm/services/job/run/audit.py | 42 ++++++++ python/cm/services/job/run/runners.py | 10 +- 8 files changed, 215 insertions(+), 177 deletions(-) create mode 100644 python/audit/alt/background.py create mode 100644 python/cm/services/job/run/audit.py diff --git a/python/audit/alt/background.py b/python/audit/alt/background.py new file mode 100644 index 0000000000..7a6900744a --- /dev/null +++ b/python/audit/alt/background.py @@ -0,0 +1,82 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from functools import wraps +from typing import Callable, ParamSpec, TypeVar + +from audit.alt.core import AuditSignature, Hooks, OperationAuditContext +from audit.alt.hooks import AuditHook +from audit.alt.object_retrievers import ignore_object_search +from audit.models import AuditLogOperationResult, AuditLogOperationType, AuditUser + +T = TypeVar("T") +P = ParamSpec("P") + + +class BackgroundOperationAuditContext(OperationAuditContext): + def save_on_start(self) -> None: + self.name = f"{self._default_name} launched" + self.result = AuditLogOperationResult.SUCCESS + self.run_pre_call_hooks() + self.save() + self.restore_defaults() + + def save_on_finish(self) -> None: + self.name = f"{self._default_name} completed" + self.collect().save() + self.restore_defaults() + + +class SetBackgroundOperationState(AuditHook): + def __call__(self): + self.context.result = AuditLogOperationResult.FAIL if self.exception else AuditLogOperationResult.SUCCESS + + +class SetSystemUser(AuditHook): + def __call__(self): + self.context.user = AuditUser.objects.filter(username="system").first() + + +class BackgroundOperationAudit: + def __init__(self, name: str, type_: AuditLogOperationType): + self._context: BackgroundOperationAuditContext = BackgroundOperationAuditContext( + signature=AuditSignature(id="Background operation", type=type_), + default_name=name, + retrieve_object=ignore_object_search, + # pre call will be called when creating initial operation record, + # on collect on creating finishing one + custom_hooks=Hooks(pre_call=(SetSystemUser,), on_collect=(SetBackgroundOperationState, SetSystemUser)), + ) + + @property + def context(self) -> BackgroundOperationAuditContext: + return self._context + + def __call__(self, func: Callable[P, T]) -> Callable[P, T]: + @wraps(func) + def wrapped(*args, **kwargs) -> T: + with self: + return func(*args, **kwargs) + + return 
wrapped + + def __enter__(self): + self._context.restore_defaults() + self._context.save_on_start() + + def __exit__(self, exc_type, exc_val, exc_tb): + self._context.attach_exception(exc_val) + self._context.save_on_finish() + + +# convenient naming to use as decorator +audit_background_operation = BackgroundOperationAudit diff --git a/python/audit/management/commands/clearaudit.py b/python/audit/management/commands/clearaudit.py index 2ebae6f0ed..40b41ecf1a 100644 --- a/python/audit/management/commands/clearaudit.py +++ b/python/audit/management/commands/clearaudit.py @@ -24,8 +24,8 @@ from django.db.models import Count, Q from django.utils import timezone -from audit.models import AuditLog, AuditLogOperationResult, AuditObject, AuditSession -from audit.utils import make_audit_log +from audit.alt.background import audit_background_operation +from audit.models import AuditLog, AuditLogOperationType, AuditObject, AuditSession logger = logging.getLogger("background_tasks") @@ -59,7 +59,6 @@ def handle(self, *args, **options): # noqa: ARG002 try: self.__handle() except Exception as e: # noqa: BLE001 - make_audit_log("audit", AuditLogOperationResult.FAIL, "completed") self.__log(e, "exception") def __handle(self): @@ -68,34 +67,31 @@ def __handle(self): self.__log("Disabled") return - threshold_date = timezone.now() - timedelta(days=config["retention_period"]) - self.__log(f"Started. 
Threshold date: {threshold_date}") - - # get delete candidates - target_operations = AuditLog.objects.filter(operation_time__lt=threshold_date) - target_logins = AuditSession.objects.filter(login_time__lt=threshold_date) - target_objects = ( - AuditObject.objects.filter(is_deleted=True) - .annotate(not_deleted_auditlogs_count=Count("auditlog", filter=~Q(auditlog__in=target_operations))) - .filter(not_deleted_auditlogs_count__lte=0) - ) - - cleared = False - if any(qs.exists() for qs in (target_operations, target_logins, target_objects)): - make_audit_log("audit", AuditLogOperationResult.SUCCESS, "launched") - - if config["data_archiving"]: - archive_path = os.path.join(self.archive_base_dir, self.archive_name) - self.__log(f"Target audit records will be archived to `{archive_path}`") - self.__archive(target_operations, target_logins, target_objects) - else: - self.__log("Archiving is disabled") + with audit_background_operation( + name='"Audit log cleanup/archiving on schedule" job', type_=AuditLogOperationType.DELETE + ): + threshold_date = timezone.now() - timedelta(days=config["retention_period"]) + self.__log(f"Started. 
Threshold date: {threshold_date}") + + # get delete candidates + target_operations = AuditLog.objects.filter(operation_time__lt=threshold_date) + target_logins = AuditSession.objects.filter(login_time__lt=threshold_date) + target_objects = ( + AuditObject.objects.filter(is_deleted=True) + .annotate(not_deleted_auditlogs_count=Count("auditlog", filter=~Q(auditlog__in=target_operations))) + .filter(not_deleted_auditlogs_count__lte=0) + ) + + if config["data_archiving"]: + archive_path = os.path.join(self.archive_base_dir, self.archive_name) + self.__log(f"Target audit records will be archived to `{archive_path}`") + self.__archive(target_operations, target_logins, target_objects) + else: + self.__log("Archiving is disabled") - cleared = self.__delete(target_operations, target_logins, target_objects) + self.__delete(target_operations, target_logins, target_objects) self.__log("Finished.") - if cleared: - make_audit_log("audit", AuditLogOperationResult.SUCCESS, "completed") def __archive(self, *querysets): os.makedirs(self.archive_base_dir, exist_ok=True) diff --git a/python/audit/utils.py b/python/audit/utils.py index 1cb96ca6eb..914de62216 100644 --- a/python/audit/utils.py +++ b/python/audit/utils.py @@ -12,7 +12,6 @@ from contextlib import suppress from functools import wraps -from typing import Callable import re from api.cluster.serializers import ClusterAuditSerializer @@ -54,8 +53,6 @@ get_cm_model_by_type, get_model_by_type, ) -from core.job.types import ExecutionStatus -from core.types import ADCMCoreType, NamedActionObject from django.contrib.auth.models import User as DjangoUser from django.core.handlers.wsgi import WSGIRequest from django.db.models import Model, ObjectDoesNotExist @@ -73,14 +70,11 @@ from rest_framework.viewsets import GenericViewSet, ModelViewSet from audit.cases.cases import get_audit_operation_and_object -from audit.cases.common import get_or_create_audit_obj from audit.cef_logger import cef_logger from audit.models import ( AuditLog, 
AuditLogOperationResult, - AuditLogOperationType, AuditObject, - AuditObjectType, AuditOperation, AuditUser, ) @@ -596,34 +590,6 @@ def wrapped(*args, **kwargs): return wrapped -def make_audit_log(operation_type, result, operation_status): - operation_type_map = { - "task": { - "type": AuditLogOperationType.DELETE, - "name": '"Task log cleanup on schedule" job', - }, - "config": { - "type": AuditLogOperationType.DELETE, - "name": '"Objects configurations cleanup on schedule" job', - }, - "sync": {"type": AuditLogOperationType.UPDATE, "name": '"User sync on schedule" job'}, - "audit": { - "type": AuditLogOperationType.DELETE, - "name": '"Audit log cleanup/archiving on schedule" job', - }, - "statistics": {"type": "", "name": '"Statistics collection on schedule" job'}, - } - operation_name = operation_type_map[operation_type]["name"] + " " + operation_status - audit_log = AuditLog.objects.create( - audit_object=None, - operation_name=operation_name, - operation_type=operation_type_map[operation_type]["type"], - operation_result=result, - user=AuditUser.objects.get(username="system"), - ) - cef_logger(audit_instance=audit_log, signature_id="Background operation", empty_resource=True) - - def get_client_ip(request: WSGIRequest) -> str | None: header_fields = ["HTTP_X_FORWARDED_FOR", "HTTP_X_FORWARDED_HOST", "HTTP_X_FORWARDED_SERVER", "REMOTE_ADDR"] host = None @@ -638,65 +604,3 @@ def get_client_ip(request: WSGIRequest) -> str | None: def get_client_agent(request: WSGIRequest) -> str: return request.META.get("HTTP_USER_AGENT", "")[:255] - - -def audit_job_finish( - target: NamedActionObject, display_name: str, is_upgrade: bool, job_result: ExecutionStatus -) -> None: - operation_name = f"{display_name} {'upgrade' if is_upgrade else 'action'} completed" - - if target.type == ADCMCoreType.HOSTPROVIDER: - obj_type = AuditObjectType.PROVIDER - else: - obj_type = AuditObjectType(target.type.value) - - audit_object = get_or_create_audit_obj( - object_id=str(target.id), - 
object_name=target.name, - object_type=obj_type, - ) - operation_result = ( - AuditLogOperationResult.SUCCESS if job_result == ExecutionStatus.SUCCESS else AuditLogOperationResult.FAIL - ) - - audit_log = AuditLog.objects.create( - audit_object=audit_object, - operation_name=operation_name, - operation_type=AuditLogOperationType.UPDATE, - operation_result=operation_result, - object_changes={}, - ) - - cef_logger(audit_instance=audit_log, signature_id="Action completion") - - -def audit_background_task(start_operation_status: str, end_operation_status: str) -> Callable: - def decorator(func: Callable) -> Callable: - @wraps(func) - def wrapped(*args, **kwargs): - make_audit_log( - operation_type="statistics", - result=AuditLogOperationResult.SUCCESS, - operation_status=start_operation_status, - ) - try: - result = func(*args, **kwargs) - except Exception as error: - make_audit_log( - operation_type="statistics", - result=AuditLogOperationResult.FAIL, - operation_status=end_operation_status, - ) - raise error - - make_audit_log( - operation_type="statistics", - result=AuditLogOperationResult.SUCCESS, - operation_status=end_operation_status, - ) - - return result - - return wrapped - - return decorator diff --git a/python/cm/management/commands/collect_statistics.py b/python/cm/management/commands/collect_statistics.py index 01b354d0ed..8b218253cb 100644 --- a/python/cm/management/commands/collect_statistics.py +++ b/python/cm/management/commands/collect_statistics.py @@ -16,7 +16,8 @@ import os import socket -from audit.utils import audit_background_task +from audit.alt.background import audit_background_operation +from audit.models import AuditLogOperationType from django.conf import settings from django.core.management import BaseCommand from django.db.models import Q @@ -98,7 +99,7 @@ def add_arguments(self, parser): default="archive-all", ) - @audit_background_task(start_operation_status="launched", end_operation_status="completed") + 
@audit_background_operation(name='"Statistics collection on schedule" job', type_=AuditLogOperationType.UPDATE) def handle(self, *_, mode: str, **__): logger.debug(msg="Statistics collector: started") statistics_data = { diff --git a/python/cm/management/commands/logrotate.py b/python/cm/management/commands/logrotate.py index a9a8d2c1e0..16d5f09be2 100644 --- a/python/cm/management/commands/logrotate.py +++ b/python/cm/management/commands/logrotate.py @@ -18,8 +18,8 @@ import shutil import logging -from audit.models import AuditLogOperationResult -from audit.utils import make_audit_log +from audit.alt.background import audit_background_operation +from audit.models import AuditLogOperationType from django.conf import settings from django.core.management.base import BaseCommand from django.db import transaction @@ -152,9 +152,15 @@ def __run_nginx_log_rotation(self): def __run_configlog_rotation(self): try: configlog_days_delta = self.config["config"]["config_rotation_in_db"] - if configlog_days_delta <= 0: - return + except KeyError as e: + self.__log("Error in ConfigLog rotation", "warning") + self.__log(e, "exception") + return + if configlog_days_delta <= 0: + return + + try: threshold_date = timezone.now() - timedelta(days=configlog_days_delta) self.__log(f"ConfigLog rotation started. 
Threshold date: {threshold_date}", "info") @@ -174,22 +180,20 @@ def __run_configlog_rotation(self): target_objectconfig_ids = { cl.obj_ref.id for cl in target_configlogs if not self.__has_related_records(cl.obj_ref) } - if target_configlog_ids or target_objectconfig_ids: - make_audit_log("config", AuditLogOperationResult.SUCCESS, "launched") - - with transaction.atomic(): - ConfigLog.objects.filter(id__in=target_configlog_ids).delete() - ObjectConfig.objects.filter(id__in=target_objectconfig_ids).delete() - if target_configlog_ids or target_objectconfig_ids: - make_audit_log("config", AuditLogOperationResult.SUCCESS, "completed") - - self.__log( - f"Deleted {len(target_configlog_ids)} ConfigLogs and " f"{len(target_objectconfig_ids)} ObjectConfigs", - "info", - ) + if target_configlog_ids or target_objectconfig_ids: + with audit_background_operation( + name='"Objects configurations cleanup on schedule" job', type_=AuditLogOperationType.DELETE + ), transaction.atomic(): + ConfigLog.objects.filter(id__in=target_configlog_ids).delete() + ObjectConfig.objects.filter(id__in=target_objectconfig_ids).delete() + + self.__log( + f"Deleted {len(target_configlog_ids)} ConfigLogs and " + f"{len(target_objectconfig_ids)} ObjectConfigs", + "info", + ) except Exception as e: # noqa: BLE001 - make_audit_log("config", AuditLogOperationResult.FAIL, "completed") self.__log("Error in ConfigLog rotation", "warning") self.__log(e, "exception") @@ -216,16 +220,24 @@ def __run_joblog_rotation(self): try: days_delta_db = self.config["config"]["log_rotation_in_db"] days_delta_fs = self.config["config"]["log_rotation_on_fs"] - if days_delta_db <= 0 and days_delta_fs <= 0: - return + except KeyError as e: + self.__log("Error in JobLog rotation", "warning") + self.__log(e, "exception") + return + + if days_delta_db <= 0 and days_delta_fs <= 0: + return + try: threshold_date_db = timezone.now() - timedelta(days=days_delta_db) threshold_date_fs = timezone.now() - timedelta(days=days_delta_fs) 
self.__log( f"JobLog rotation started. Threshold dates: " f"db - {threshold_date_db}, fs - {threshold_date_fs}", "info", ) + is_deleted = False + if days_delta_db > 0: target_tasklogs = TaskLog.objects.filter( finish_date__lte=threshold_date_db, @@ -233,6 +245,7 @@ def __run_joblog_rotation(self): ) if target_tasklogs: is_deleted = True + with transaction.atomic(): target_tasklogs.delete() @@ -240,26 +253,36 @@ def __run_joblog_rotation(self): JobLog.objects.filter(task__isnull=True).delete() self.__log("db JobLog rotated", "info") + if days_delta_fs > 0: for name in os.listdir(settings.RUN_DIR): if not name.startswith("."): # a line of code is used for development path = settings.RUN_DIR / name try: - m_time = datetime.fromtimestamp(os.path.getmtime(path), tz=timezone.get_current_timezone()) # noqa: PTH204 + m_time = datetime.fromtimestamp( + os.path.getmtime(path), # noqa: PTH204 + tz=timezone.get_current_timezone(), + ) if timezone.now() - m_time > timedelta(days=days_delta_fs): is_deleted = True + if os.path.isdir(path): # noqa: PTH112 shutil.rmtree(path) else: os.remove(path) # noqa: PTH107 except FileNotFoundError: pass - if is_deleted: - make_audit_log("task", AuditLogOperationResult.SUCCESS, "launched") - make_audit_log("task", AuditLogOperationResult.SUCCESS, "completed") + self.__log("fs JobLog rotated", "info") + + if is_deleted: + audit = audit_background_operation( + name='"Task log cleanup on schedule" job', type_=AuditLogOperationType.DELETE + ) + audit.context.save_on_start() + audit.context.save_on_finish() + except Exception as e: # noqa: BLE001 - make_audit_log("task", AuditLogOperationResult.FAIL, "completed") self.__log("Error in JobLog rotation", "warning") self.__log(e, "exception") diff --git a/python/cm/management/commands/run_ldap_sync.py b/python/cm/management/commands/run_ldap_sync.py index c4b5082964..02f364bbc9 100644 --- a/python/cm/management/commands/run_ldap_sync.py +++ b/python/cm/management/commands/run_ldap_sync.py @@ -13,8 
+13,8 @@ from datetime import timedelta import logging -from audit.models import AuditLogOperationResult -from audit.utils import make_audit_log +from audit.alt.background import audit_background_operation +from audit.models import AuditLogOperationType from django.core.management.base import BaseCommand from django.utils import timezone @@ -41,28 +41,24 @@ def handle(self, *args, **options): # noqa: ARG002 period = get_settings(adcm_object) if period <= 0: return + if TaskLog.objects.filter(action__name="run_ldap_sync", status=JobStatus.RUNNING).exists(): logger.debug("Sync has already launched, we need to wait for the task end") return + last_sync = TaskLog.objects.filter( action__name="run_ldap_sync", status__in=[JobStatus.SUCCESS, JobStatus.FAILED], ).last() - if last_sync is None: + + if not last_sync: logger.debug("First ldap sync launched in %s", timezone.now()) - make_audit_log("sync", AuditLogOperationResult.SUCCESS, "launched") - task = run_action(action=action, obj=adcm_object, payload=ActionRunPayload()) - if task: - make_audit_log("sync", AuditLogOperationResult.SUCCESS, "completed") - else: - make_audit_log("sync", AuditLogOperationResult.FAIL, "completed") - return - new_rotate_time = last_sync.finish_date + timedelta(minutes=period - 1) - if new_rotate_time <= timezone.now(): + else: + next_rotation_time = last_sync.finish_date + timedelta(minutes=period - 1) + if next_rotation_time > timezone.now(): + return + logger.debug("Ldap sync launched in %s", timezone.now()) - make_audit_log("sync", AuditLogOperationResult.SUCCESS, "launched") - task = run_action(action=action, obj=adcm_object, payload=ActionRunPayload()) - if task: - make_audit_log("sync", AuditLogOperationResult.SUCCESS, "completed") - else: - make_audit_log("sync", AuditLogOperationResult.FAIL, "completed") + + with audit_background_operation(name='"User sync on schedule" job', type_=AuditLogOperationType.UPDATE): + run_action(action=action, obj=adcm_object, payload=ActionRunPayload()) 
diff --git a/python/cm/services/job/run/audit.py b/python/cm/services/job/run/audit.py new file mode 100644 index 0000000000..629d28fada --- /dev/null +++ b/python/cm/services/job/run/audit.py @@ -0,0 +1,42 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from audit.alt.core import AuditSignature, Hooks, OperationAuditContext +from audit.alt.object_retrievers import ignore_object_search +from audit.models import AuditLogOperationResult, AuditLogOperationType, AuditObject, AuditObjectType +from core.job.types import ExecutionStatus, Task +from core.types import ADCMCoreType + + +def audit_task_finish(task: Task, task_result: ExecutionStatus) -> None: + audit_context = OperationAuditContext( + signature=AuditSignature(id="Action completion", type=AuditLogOperationType.UPDATE), + default_name=f"{task.action.display_name} {'upgrade' if task.action.is_upgrade else 'action'} completed", + retrieve_object=ignore_object_search, + custom_hooks=Hooks(), + ) + audit_context.result = ( + AuditLogOperationResult.SUCCESS if task_result == ExecutionStatus.SUCCESS else AuditLogOperationResult.FAIL + ) + + # If object doesn't exist for some reason, we don't create it. + # Now action can't be launched avoiding audit. + # In case it will change, don't use `target.name`, + # use retrievers that create audit objects based on type and name + # in order to build correct audit names for service/component/action host group. 
+ audit_context.object = AuditObject.objects.filter( + object_id=str(task.target.id), + object_type=AuditObjectType(task.target.type.value) + if task.target.type != ADCMCoreType.HOSTPROVIDER + else AuditObjectType.PROVIDER, + ).first() + audit_context.save() diff --git a/python/cm/services/job/run/runners.py b/python/cm/services/job/run/runners.py index a09e1c27f1..f4ac2b6328 100644 --- a/python/cm/services/job/run/runners.py +++ b/python/cm/services/job/run/runners.py @@ -27,6 +27,7 @@ update_issues, update_object_maintenance_mode, ) +from cm.services.job.run.audit import audit_task_finish NO_PROCESS_PID = 0 @@ -231,18 +232,11 @@ def _should_proceed(self, last_job_result: ExecutionStatus) -> bool: return last_job_result == ExecutionStatus.ABORTED def _finish(self, task: Task, last_job: Job | None): - from audit.utils import audit_job_finish - task_result = self._runtime.status remove_task_lock(task_id=task.id) - audit_job_finish( - target=task.target, - display_name=task.action.display_name, - is_upgrade=task.action.is_upgrade, - job_result=task_result, - ) + audit_task_finish(task=task, task_result=task_result) finished_task = self._repo.get_task(id=task.id) if finished_task.owner: From ac11122d67dd56f3a4a181fba1f43f4e6f57e2a0 Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Mon, 1 Jul 2024 17:24:54 +0300 Subject: [PATCH 10/98] ADCM-5706: Rework audit for HostProvider views --- python/api_v2/hostprovider/permissions.py | 2 -- python/api_v2/hostprovider/views.py | 8 ++++---- python/api_v2/utils/audit.py | 2 ++ 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/python/api_v2/hostprovider/permissions.py b/python/api_v2/hostprovider/permissions.py index 41bb13160b..0053031d05 100644 --- a/python/api_v2/hostprovider/permissions.py +++ b/python/api_v2/hostprovider/permissions.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from audit.utils import audit from rest_framework.permissions import DjangoObjectPermissions @@ -25,7 +24,6 @@ class HostProviderPermissions(DjangoObjectPermissions): "DELETE": ["%(app_label)s.delete_%(model_name)s"], } - @audit def has_permission(self, request, view) -> bool: if view.action in ["destroy", "update", "partial_update"]: return True diff --git a/python/api_v2/hostprovider/views.py b/python/api_v2/hostprovider/views.py index e5e57dbb89..4a1d108585 100644 --- a/python/api_v2/hostprovider/views.py +++ b/python/api_v2/hostprovider/views.py @@ -11,7 +11,7 @@ # limitations under the License. from adcm.permissions import VIEW_PROVIDER_PERM -from audit.utils import audit +from audit.alt.api import audit_create, audit_delete from cm.api import add_host_provider, delete_host_provider from cm.errors import AdcmEx from cm.models import HostProvider, ObjectType, Prototype @@ -44,7 +44,7 @@ HostProviderCreateSerializer, HostProviderSerializer, ) -from api_v2.utils.audit import parent_hostprovider_from_lookup +from api_v2.utils.audit import hostprovider_from_lookup, hostprovider_from_response, parent_hostprovider_from_lookup from api_v2.views import ADCMGenericViewSet @@ -139,7 +139,7 @@ def get_serializer_class(self): return self.serializer_class - @audit + @audit_create(name="Provider created", object_=hostprovider_from_response) def create(self, request, *args, **kwargs): # noqa: ARG001, ARG002 serializer = self.get_serializer(data=request.data) if not serializer.is_valid(): @@ -156,7 +156,7 @@ def create(self, request, *args, **kwargs): # noqa: ARG001, ARG002 return Response(data=HostProviderSerializer(host_provider).data, status=HTTP_201_CREATED) - @audit + @audit_delete(name="Provider deleted", object_=hostprovider_from_lookup, removed_on_success=True) def destroy(self, request, *args, **kwargs): # noqa: ARG002 host_provider = self.get_object() delete_host_provider(host_provider) diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 
8d4635dcee..618a3d11f3 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -108,6 +108,8 @@ def get_name(self, id_: str | int) -> str | None: parent_hostprovider_from_lookup = _extract_hostprovider_from( extract_id=ExtractID(field="hostprovider_pk").from_lookup_kwargs ) +hostprovider_from_lookup = _extract_hostprovider_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +hostprovider_from_response = _extract_hostprovider_from(extract_id=ExtractID(field="id").from_response) _extract_host_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.HOST, create_new=create_audit_host_object From 4b6a7fab1813d6a16f5ddc852a828abc2f2def7b Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Tue, 2 Jul 2024 12:27:32 +0500 Subject: [PATCH 11/98] ADCM-5713 Specialize Upgrade views --- python/api_v2/cluster/urls.py | 4 +- python/api_v2/cluster/views.py | 9 ++++ .../api_v2/{ => generic}/upgrade/__init__.py | 0 python/api_v2/generic/upgrade/api_schema.py | 54 +++++++++++++++++++ python/api_v2/generic/upgrade/audit.py | 49 +++++++++++++++++ .../{ => generic}/upgrade/serializers.py | 0 python/api_v2/{ => generic}/upgrade/views.py | 11 +--- python/api_v2/hostprovider/urls.py | 4 +- python/api_v2/hostprovider/views.py | 9 ++++ 9 files changed, 127 insertions(+), 13 deletions(-) rename python/api_v2/{ => generic}/upgrade/__init__.py (100%) create mode 100644 python/api_v2/generic/upgrade/api_schema.py create mode 100644 python/api_v2/generic/upgrade/audit.py rename python/api_v2/{ => generic}/upgrade/serializers.py (100%) rename python/api_v2/{ => generic}/upgrade/views.py (96%) diff --git a/python/api_v2/cluster/urls.py b/python/api_v2/cluster/urls.py index 5bf4c5f212..6546c0b877 100644 --- a/python/api_v2/cluster/urls.py +++ b/python/api_v2/cluster/urls.py @@ -26,6 +26,7 @@ ClusterHostActionViewSet, ClusterHostGroupConfigViewSet, ClusterImportViewSet, + ClusterUpgradeViewSet, ClusterViewSet, HostClusterViewSet, ) @@ -55,7 +56,6 
@@ ServiceImportViewSet, ServiceViewSet, ) -from api_v2.upgrade.views import UpgradeViewSet CLUSTER_PREFIX = "" ACTION_PREFIX = "actions" @@ -174,7 +174,7 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st # other upgrade_router = NestedSimpleRouter(parent_router=cluster_router, parent_prefix=CLUSTER_PREFIX, lookup="cluster") -upgrade_router.register(prefix="upgrades", viewset=UpgradeViewSet) +upgrade_router.register(prefix="upgrades", viewset=ClusterUpgradeViewSet) urlpatterns = [ diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index 50d5a96313..187f3146a7 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -111,6 +111,9 @@ from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer from api_v2.generic.imports.views import ImportViewSet +from api_v2.generic.upgrade.api_schema import document_upgrade_viewset +from api_v2.generic.upgrade.audit import audit_upgrade_viewset +from api_v2.generic.upgrade.views import UpgradeViewSet from api_v2.host.filters import HostMemberFilter from api_v2.host.serializers import ( HostAddSerializer, @@ -800,3 +803,9 @@ class ClusterActionHostGroupActionsViewSet(ActionHostGroupActionsViewSet): @audit_config_viewset(type_in_name="Cluster", retrieve_owner=parent_cluster_from_lookup) class ClusterConfigViewSet(ConfigLogViewSet): ... + + +@document_upgrade_viewset(object_type="cluster") +@audit_upgrade_viewset(retrieve_owner=parent_cluster_from_lookup) +class ClusterUpgradeViewSet(UpgradeViewSet): + ... 
diff --git a/python/api_v2/upgrade/__init__.py b/python/api_v2/generic/upgrade/__init__.py similarity index 100% rename from python/api_v2/upgrade/__init__.py rename to python/api_v2/generic/upgrade/__init__.py diff --git a/python/api_v2/generic/upgrade/api_schema.py b/python/api_v2/generic/upgrade/api_schema.py new file mode 100644 index 0000000000..2f76beb0fd --- /dev/null +++ b/python/api_v2/generic/upgrade/api_schema.py @@ -0,0 +1,54 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from drf_spectacular.utils import extend_schema, extend_schema_view +from rest_framework.status import ( + HTTP_204_NO_CONTENT, + HTTP_400_BAD_REQUEST, + HTTP_403_FORBIDDEN, + HTTP_404_NOT_FOUND, + HTTP_409_CONFLICT, +) + +from api_v2.api_schema import responses +from api_v2.generic.action.serializers import ActionRunSerializer +from api_v2.generic.upgrade.serializers import UpgradeListSerializer, UpgradeRetrieveSerializer + + +def document_upgrade_viewset(object_type: str): + capitalized_type = object_type.capitalize() + + return extend_schema_view( + run=extend_schema( + operation_id=f"post{capitalized_type}Upgrade", + summary=f"POST {object_type}'s upgrade", + description=f"Run {object_type}'s upgrade.", + responses={HTTP_204_NO_CONTENT: None} + | responses( + success=ActionRunSerializer, + errors=(HTTP_400_BAD_REQUEST, HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT), + ), + ), + list=extend_schema( + operation_id=f"get{capitalized_type}Upgrades", + summary=f"GET 
{object_type} upgrades", + description=f"Get a list of all {object_type}'s upgrades.", + responses=responses(success=UpgradeListSerializer, errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND)), + ), + retrieve=extend_schema( + operation_id=f"get{capitalized_type}Upgrade", + summary=f"GET {object_type} upgrade", + description=f"Get information about a specific {object_type}'s upgrade.", + responses=responses(success=UpgradeRetrieveSerializer, errors=HTTP_404_NOT_FOUND), + ), + ) diff --git a/python/api_v2/generic/upgrade/audit.py b/python/api_v2/generic/upgrade/audit.py new file mode 100644 index 0000000000..24da2fbba0 --- /dev/null +++ b/python/api_v2/generic/upgrade/audit.py @@ -0,0 +1,49 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from audit.alt.api import audit_update, audit_view +from audit.alt.core import AuditedCallArguments, OperationAuditContext, Result, RetrieveAuditObjectFunc +from audit.alt.hooks import AuditHook, adjust_denied_on_404_result +from cm.models import Upgrade + +from api_v2.utils.audit import object_does_exist + + +def audit_upgrade_viewset(retrieve_owner: RetrieveAuditObjectFunc): + return audit_view( + run=audit_update(name="upgrade launched", object_=retrieve_owner).attach_hooks( + on_collect=[set_name, adjust_denied_on_404_result(objects_exist=action_exists)] + ) + ) + + +# hooks + + +def action_exists(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=Upgrade) + + +def set_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +): + upgrade_name, action_name = Upgrade.objects.values_list("display_name", "action__display_name").filter( + id=call_arguments.get("pk") + ).first() or (None, None) + + if action_name is not None: + context.name = f"{action_name} upgrade launched".strip() + else: + context.name = f"Upgraded to {upgrade_name or ''}".strip() diff --git a/python/api_v2/upgrade/serializers.py b/python/api_v2/generic/upgrade/serializers.py similarity index 100% rename from python/api_v2/upgrade/serializers.py rename to python/api_v2/generic/upgrade/serializers.py diff --git a/python/api_v2/upgrade/views.py b/python/api_v2/generic/upgrade/views.py similarity index 96% rename from python/api_v2/upgrade/views.py rename to python/api_v2/generic/upgrade/views.py index 41e35e40f9..beeffe9719 100644 --- a/python/api_v2/upgrade/views.py +++ b/python/api_v2/generic/upgrade/views.py @@ -19,7 +19,6 @@ check_custom_perm, get_object_for_user, ) -from audit.utils import audit from cm.errors import AdcmEx from cm.models import Cluster, HostProvider, PrototypeConfig, TaskLog, Upgrade from cm.stack import check_hostcomponents_objects_exist @@ 
-36,17 +35,12 @@ from api_v2.generic.action.serializers import ActionRunSerializer from api_v2.generic.action.utils import get_action_configuration, insert_service_ids, unique_hc_entries from api_v2.generic.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type +from api_v2.generic.upgrade.serializers import UpgradeListSerializer, UpgradeRetrieveSerializer from api_v2.task.serializers import TaskListSerializer -from api_v2.upgrade.serializers import UpgradeListSerializer, UpgradeRetrieveSerializer from api_v2.views import ADCMGenericViewSet -class UpgradeViewSet( - ListModelMixin, - GetParentObjectMixin, - RetrieveModelMixin, - ADCMGenericViewSet, -): +class UpgradeViewSet(ListModelMixin, GetParentObjectMixin, RetrieveModelMixin, ADCMGenericViewSet): queryset = ( Upgrade.objects.select_related("action", "bundle", "action__prototype") .prefetch_related("bundle__prototype_set") @@ -149,7 +143,6 @@ def retrieve(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG0 return Response(serializer.data) - @audit @action(methods=["post"], detail=True) def run(self, request: Request, *_, **__) -> Response: serializer = self.get_serializer_class()(data=request.data) diff --git a/python/api_v2/hostprovider/urls.py b/python/api_v2/hostprovider/urls.py index d0af027d0e..57309c3d0f 100644 --- a/python/api_v2/hostprovider/urls.py +++ b/python/api_v2/hostprovider/urls.py @@ -23,9 +23,9 @@ HostProviderConfigViewSet, HostProviderGroupConfigViewSet, HostProviderHostGroupConfigViewSet, + HostProviderUpgradeViewSet, HostProviderViewSet, ) -from api_v2.upgrade.views import UpgradeViewSet CONFIG_GROUPS_PREFIX = "config-groups" @@ -44,7 +44,7 @@ def extract_urls_from_routers(routers: Iterable[NestedSimpleRouter]) -> tuple[st config_router.register(prefix="configs", viewset=HostProviderConfigViewSet, basename="provider-config") upgrade_router = NestedSimpleRouter(parent_router=router, parent_prefix="", lookup="hostprovider") 
-upgrade_router.register(prefix="upgrades", viewset=UpgradeViewSet) +upgrade_router.register(prefix="upgrades", viewset=HostProviderUpgradeViewSet) group_config_routers = add_group_config_routers( diff --git a/python/api_v2/hostprovider/views.py b/python/api_v2/hostprovider/views.py index e5e57dbb89..8bb0474a28 100644 --- a/python/api_v2/hostprovider/views.py +++ b/python/api_v2/hostprovider/views.py @@ -38,6 +38,9 @@ audit_host_group_config_viewset, ) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet +from api_v2.generic.upgrade.api_schema import document_upgrade_viewset +from api_v2.generic.upgrade.audit import audit_upgrade_viewset +from api_v2.generic.upgrade.views import UpgradeViewSet from api_v2.hostprovider.filters import HostProviderFilter from api_v2.hostprovider.permissions import HostProviderPermissions from api_v2.hostprovider.serializers import ( @@ -191,3 +194,9 @@ class HostProviderActionViewSet(ActionViewSet): @audit_config_viewset(type_in_name="Provider", retrieve_owner=parent_hostprovider_from_lookup) class HostProviderConfigViewSet(ConfigLogViewSet): ... + + +@document_upgrade_viewset(object_type="hostprovider") +@audit_upgrade_viewset(retrieve_owner=parent_hostprovider_from_lookup) +class HostProviderUpgradeViewSet(UpgradeViewSet): + ... 
From 9ea7683673c97be47cb466c398eeb406a4763db0 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Tue, 2 Jul 2024 08:52:30 +0000 Subject: [PATCH 12/98] ADCM-5699: Rework audit for Policy views from RBAC --- python/api_v2/rbac/policy/permissions.py | 29 ++++++++++ python/api_v2/rbac/policy/views.py | 43 ++++++++++---- python/api_v2/tests/test_audit/test_policy.py | 2 +- python/api_v2/utils/audit.py | 58 ++++++++++++++++--- 4 files changed, 112 insertions(+), 20 deletions(-) create mode 100644 python/api_v2/rbac/policy/permissions.py diff --git a/python/api_v2/rbac/policy/permissions.py b/python/api_v2/rbac/policy/permissions.py new file mode 100644 index 0000000000..2241964745 --- /dev/null +++ b/python/api_v2/rbac/policy/permissions.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from adcm.permissions import VIEW_POLICY_PERMISSION +from rest_framework.exceptions import NotFound +from rest_framework.permissions import DjangoModelPermissions + + +class PolicyPermissions(DjangoModelPermissions): + method_permissions_map = { + "patch": [(VIEW_POLICY_PERMISSION, NotFound)], + "delete": [(VIEW_POLICY_PERMISSION, NotFound)], + } + + def has_permission(self, request, view): + for permission, error in self.method_permissions_map.get(request.method.lower(), []): + if not request.user.has_perm(perm=permission): + raise error + + return super().has_permission(request, view) diff --git a/python/api_v2/rbac/policy/views.py b/python/api_v2/rbac/policy/views.py index 933a10a36a..afcdd5a2fe 100644 --- a/python/api_v2/rbac/policy/views.py +++ b/python/api_v2/rbac/policy/views.py @@ -10,15 +10,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -from adcm.permissions import VIEW_POLICY_PERMISSION, CustomModelPermissionsByMethod -from audit.utils import audit +from adcm.permissions import VIEW_POLICY_PERMISSION +from audit.alt.api import audit_create, audit_delete, audit_update +from audit.alt.hooks import ( + extract_current_from_response, + extract_from_object, + extract_previous_from_object, + only_on_success, +) from cm.errors import AdcmEx from django_filters.rest_framework.backends import DjangoFilterBackend from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin from rbac.models import Policy from rbac.services.policy import policy_create, policy_update -from rest_framework.exceptions import NotFound from rest_framework.mixins import DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.response import Response from rest_framework.status import ( @@ -33,7 +38,14 @@ from api_v2.api_schema import DefaultParams, ErrorSerializer from api_v2.rbac.policy.filters import PolicyFilter +from 
api_v2.rbac.policy.permissions import PolicyPermissions from api_v2.rbac.policy.serializers import PolicyCreateSerializer, PolicySerializer, PolicyUpdateSerializer +from api_v2.utils.audit import ( + policy_from_lookup, + policy_from_response, + retrieve_policy_role_object_group, + update_policy_name, +) from api_v2.views import ADCMGenericViewSet @@ -103,11 +115,7 @@ class PolicyViewSet(PermissionListMixin, ListModelMixin, RetrieveModelMixin, Des queryset = Policy.objects.select_related("role").prefetch_related("group", "object").order_by("name") filter_backends = (DjangoFilterBackend,) filterset_class = PolicyFilter - permission_classes = (CustomModelPermissionsByMethod,) - method_permissions_map = { - "patch": [(VIEW_POLICY_PERMISSION, NotFound)], - "delete": [(VIEW_POLICY_PERMISSION, NotFound)], - } + permission_classes = (PolicyPermissions,) permission_required = [VIEW_POLICY_PERMISSION] def get_serializer_class(self) -> type[PolicySerializer | PolicyCreateSerializer | PolicyUpdateSerializer]: @@ -119,14 +127,27 @@ def get_serializer_class(self) -> type[PolicySerializer | PolicyCreateSerializer return PolicySerializer - @audit + @audit_create(name="Policy created", object_=policy_from_response) def create(self, request, *args, **kwargs): # noqa: ARG002 serializer = self.get_serializer_class()(data=request.data) serializer.is_valid(raise_exception=True) policy = policy_create(**serializer.validated_data) return Response(data=PolicySerializer(policy).data, status=HTTP_201_CREATED) - @audit + @( + audit_update(name="Policy updated", object_=policy_from_lookup) + .attach_hooks(on_collect=only_on_success(update_policy_name)) + .track_changes( + before=( + extract_previous_from_object(Policy, "name", "description"), + extract_from_object(func=retrieve_policy_role_object_group, section="previous"), + ), + after=( + extract_current_from_response("name", "description"), + extract_from_object(func=retrieve_policy_role_object_group, section="current"), + ), + ) + ) def 
partial_update(self, request, *args, **kwargs): # noqa: ARG002 policy = self.get_object() @@ -138,7 +159,7 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 policy = policy_update(policy, **serializer.validated_data) return Response(data=PolicySerializer(policy).data) - @audit + @audit_delete(name="Policy deleted", object_=policy_from_lookup, removed_on_success=True) def destroy(self, request, *args, **kwargs): policy = self.get_object() if policy.built_in: diff --git a/python/api_v2/tests/test_audit/test_policy.py b/python/api_v2/tests/test_audit/test_policy.py index 6b93bdc8ec..80f9aab42f 100644 --- a/python/api_v2/tests/test_audit/test_policy.py +++ b/python/api_v2/tests/test_audit/test_policy.py @@ -158,7 +158,7 @@ def test_policy_all_fields_edit_success(self): "name": "Updated name", "description": "new description", "role": "Another custom role name", - "group": ["Other group [local]", "Other group 2 [local]"], + "group": ["Other group 2 [local]", "Other group [local]"], }, "previous": { "name": "Test policy 2", diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index fd7a301b9b..e4263f9804 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -20,8 +20,9 @@ from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType from cm.models import ADCM, Bundle, Cluster, ClusterObject, Host, HostProvider, ServiceComponent +from cm.utils import get_obj_type from django.db.models import Model -from rbac.models import Group, User +from rbac.models import Group, Policy, User from rest_framework.response import Response # object retrievers @@ -77,8 +78,15 @@ def get_name(self, id_: str | int) -> str | None: create_audit_host_object = IDBasedAuditObjectCreator(model=Host, name_field="fqdn") create_audit_user_object = IDBasedAuditObjectCreator(model=User, name_field="username") create_audit_group_object = IDBasedAuditObjectCreator(model=Group) 
+create_audit_policy_object = IDBasedAuditObjectCreator(model=Policy) +bundle_from_lookup = GeneralAuditObjectRetriever( + audit_object_type=AuditObjectType.BUNDLE, + create_new=IDBasedAuditObjectCreator(model=Bundle), + extract_id=ExtractID(field="pk").from_lookup_kwargs, +) + _extract_cluster_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.CLUSTER, create_new=create_audit_cluster_object ) @@ -129,6 +137,12 @@ def get_name(self, id_: str | int) -> str | None: group_from_response = _extract_group_from(extract_id=ExtractID(field="id").from_response) group_from_lookup = _extract_group_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +_extract_policy_from = partial( + GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.POLICY, create_new=create_audit_policy_object +) +policy_from_response = _extract_policy_from(extract_id=ExtractID(field="id").from_response) +policy_from_lookup = _extract_policy_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) + def adcm_audit_object( context: "OperationAuditContext", # noqa: ARG001 @@ -214,6 +228,27 @@ def update_group_name( instance.save(update_fields=["object_name"]) +def update_policy_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> None: + _ = call_arguments, result, exception + + if not context.object: + return + + instance = context.object + + new_name = Policy.objects.values_list("name", flat=True).filter(id=instance.object_id).first() + if not new_name or instance.object_name == new_name: + return + + instance.object_name = new_name + instance.save(update_fields=["object_name"]) + + # hook helpers / special functions @@ -247,6 +282,20 @@ def retrieve_group_name_users(id_: int) -> dict: } +def retrieve_policy_role_object_group(id_: int) -> dict: + if (policy := Policy.objects.prefetch_related("group", "object").filter(pk=id_).first()) is None: + return {} + + return { + 
"role": policy.role.display_name if policy.role else "", + "object": [ + {"id": obj.object.pk, "name": obj.object.name, "type": get_obj_type(obj.content_type.name)} + for obj in policy.object.all() + ], + "group": sorted(group.name for group in policy.group.all()), + } + + # name changers @@ -289,10 +338,3 @@ def __call__(self): username = User.objects.values_list("username", flat=True).filter(id=user_id).first() or "" self.context.name = self.context.name.format(username=username).strip() - - -bundle_from_lookup = GeneralAuditObjectRetriever( - audit_object_type=AuditObjectType.BUNDLE, - create_new=IDBasedAuditObjectCreator(model=Bundle), - extract_id=ExtractID(field="pk").from_lookup_kwargs, -) From e7db493ccdadda9d652eb383ed3356ef42791f5a Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Tue, 2 Jul 2024 14:13:54 +0000 Subject: [PATCH 13/98] ADCM-5705 Specialize Host ViewSets --- python/api_v2/host/permissions.py | 2 -- python/api_v2/host/views.py | 23 +++++++++++++++++------ python/api_v2/utils/audit.py | 23 +++++++++++++++++++++++ 3 files changed, 40 insertions(+), 8 deletions(-) diff --git a/python/api_v2/host/permissions.py b/python/api_v2/host/permissions.py index e0738f48d2..ab94a7eca1 100644 --- a/python/api_v2/host/permissions.py +++ b/python/api_v2/host/permissions.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from audit.utils import audit from rest_framework.permissions import DjangoObjectPermissions @@ -25,7 +24,6 @@ class HostsPermissions(DjangoObjectPermissions): "DELETE": ["%(app_label)s.delete_%(model_name)s"], } - @audit def has_permission(self, request, view) -> bool: if view.action in ["destroy", "update", "partial_update"]: return True diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index b299e03872..74f290d1fe 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -19,7 +19,8 @@ check_custom_perm, get_object_for_user, ) -from audit.utils import audit +from audit.alt.api import audit_create, audit_delete, audit_update +from audit.alt.hooks import extract_current_from_response, extract_previous_from_object, only_on_success from cm.api import delete_host from cm.errors import AdcmEx from cm.models import Cluster, ConcernType, Host, HostProvider @@ -60,7 +61,7 @@ HostUpdateSerializer, ) from api_v2.host.utils import create_host, maintenance_mode, process_config_issues_policies_hc -from api_v2.utils.audit import parent_host_from_lookup +from api_v2.utils.audit import host_from_lookup, host_from_response, parent_host_from_lookup, update_host_name from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -197,7 +198,7 @@ def get_serializer_class(self): return HostSerializer - @audit + @audit_create(name="Host created", object_=host_from_response) def create(self, request, *args, **kwargs): # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -225,14 +226,21 @@ def create(self, request, *args, **kwargs): # noqa: ARG002 data=HostSerializer(instance=host, context=self.get_serializer_context()).data, status=HTTP_201_CREATED ) - @audit + @audit_delete(name="Host deleted", object_=host_from_lookup, removed_on_success=True) def destroy(self, request, *args, **kwargs): # noqa: ARG002 host = self.get_object() check_custom_perm(request.user, "remove", 
"host", host) delete_host(host=host) return Response(status=HTTP_204_NO_CONTENT) - @audit + @( + audit_update(name="Host updated", object_=host_from_lookup) + .attach_hooks(on_collect=only_on_success(update_host_name)) + .track_changes( + before=extract_previous_from_object(Host, "fqdn", "description"), + after=extract_current_from_response("fqdn", "description", fqdn="name"), + ) + ) def partial_update(self, request, *args, **kwargs): # noqa: ARG002 instance = self.get_object() check_custom_perm(request.user, "change", "host", instance) @@ -257,7 +265,10 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 status=HTTP_200_OK, data=HostSerializer(instance=instance, context=self.get_serializer_context()).data ) - @audit + @audit_update(name="Host updated", object_=host_from_lookup).track_changes( + before=extract_previous_from_object(Host, "maintenance_mode"), + after=extract_current_from_response("maintenance_mode"), + ) @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 return maintenance_mode(request=request, host=self.get_object()) diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index e4263f9804..d2292991e1 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -123,6 +123,7 @@ def get_name(self, id_: str | int) -> str | None: GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.HOST, create_new=create_audit_host_object ) host_from_lookup = _extract_host_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +host_from_response = _extract_host_from(extract_id=ExtractID(field="id").from_response) parent_host_from_lookup = _extract_host_from(extract_id=ExtractID(field="host_pk").from_lookup_kwargs) _extract_user_from = partial( @@ -338,3 +339,25 @@ def __call__(self): username = User.objects.values_list("username", 
flat=True).filter(id=user_id).first() or "" self.context.name = self.context.name.format(username=username).strip() + + +def update_host_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> None: + _ = call_arguments, result, exception + + if not context.object: + return + + instance = context.object + + new_name = Host.objects.values_list("fqdn", flat=True).filter(id=instance.object_id).first() + + if not new_name: + return + + instance.object_name = new_name + instance.save(update_fields=["object_name"]) From 7b1c48025e975832019547b523398a3da6d91f24 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Wed, 3 Jul 2024 07:14:41 +0000 Subject: [PATCH 14/98] ADCM-5700: Rework audit for Role views from RBAC --- python/api_v2/rbac/role/permissions.py | 29 +++++++++++++++ python/api_v2/rbac/role/views.py | 40 ++++++++++++++------- python/api_v2/tests/test_audit/test_role.py | 2 +- python/api_v2/utils/audit.py | 40 +++++++++++++++++++-- 4 files changed, 96 insertions(+), 15 deletions(-) create mode 100644 python/api_v2/rbac/role/permissions.py diff --git a/python/api_v2/rbac/role/permissions.py b/python/api_v2/rbac/role/permissions.py new file mode 100644 index 0000000000..d1baa60ddb --- /dev/null +++ b/python/api_v2/rbac/role/permissions.py @@ -0,0 +1,29 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from adcm.permissions import VIEW_ROLE_PERMISSION +from rest_framework.exceptions import NotFound +from rest_framework.permissions import DjangoModelPermissions + + +class RolePermissions(DjangoModelPermissions): + method_permissions_map = { + "patch": [(VIEW_ROLE_PERMISSION, NotFound)], + "delete": [(VIEW_ROLE_PERMISSION, NotFound)], + } + + def has_permission(self, request, view): + for permission, error in self.method_permissions_map.get(request.method.lower(), []): + if not request.user.has_perm(perm=permission): + raise error + + return super().has_permission(request, view) diff --git a/python/api_v2/rbac/role/views.py b/python/api_v2/rbac/role/views.py index 2f9bc13ba9..4d8f11f385 100644 --- a/python/api_v2/rbac/role/views.py +++ b/python/api_v2/rbac/role/views.py @@ -12,8 +12,14 @@ from collections import defaultdict -from adcm.permissions import VIEW_ROLE_PERMISSION, CustomModelPermissionsByMethod -from audit.utils import audit +from adcm.permissions import VIEW_ROLE_PERMISSION +from audit.alt.api import audit_create, audit_delete, audit_update +from audit.alt.hooks import ( + extract_current_from_response, + extract_from_object, + extract_previous_from_object, + only_on_success, +) from cm.errors import AdcmEx from cm.models import Cluster, ClusterObject, Host, HostProvider, ProductCategory from django.db.models import Prefetch @@ -23,7 +29,6 @@ from rbac.models import Role, RoleTypes from rbac.services.role import role_create, role_update from rest_framework.decorators import action -from rest_framework.exceptions import NotFound from rest_framework.mixins import CreateModelMixin, DestroyModelMixin, ListModelMixin, RetrieveModelMixin from rest_framework.response import Response from rest_framework.status import ( @@ -38,7 +43,9 @@ from api_v2.api_schema import DefaultParams, ErrorSerializer from api_v2.rbac.role.filters import RoleFilter +from api_v2.rbac.role.permissions import RolePermissions from api_v2.rbac.role.serializers import 
RoleCreateSerializer, RoleSerializer, RoleUpdateSerializer +from api_v2.utils.audit import retrieve_role_children, role_from_lookup, role_from_response, update_role_name from api_v2.views import ADCMGenericViewSet @@ -153,12 +160,8 @@ class RoleViewSet( .exclude(type=RoleTypes.HIDDEN) .order_by("display_name") ) - permission_classes = (CustomModelPermissionsByMethod,) - method_permissions_map = { - "patch": [(VIEW_ROLE_PERMISSION, NotFound)], - "delete": [(VIEW_ROLE_PERMISSION, NotFound)], - } - permission_required = ["rbac.view_role"] + permission_classes = (RolePermissions,) + permission_required = [VIEW_ROLE_PERMISSION] filterset_class = RoleFilter def get_serializer_class(self): @@ -170,7 +173,7 @@ def get_serializer_class(self): return RoleSerializer - @audit + @audit_create(name="Role created", object_=role_from_response) def create(self, request, *args, **kwargs): # noqa: ARG002 serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) @@ -178,7 +181,20 @@ def create(self, request, *args, **kwargs): # noqa: ARG002 return Response(data=RoleSerializer(instance=role).data, status=HTTP_201_CREATED) - @audit + @( + audit_update(name="Role updated", object_=role_from_lookup) + .attach_hooks(on_collect=only_on_success(update_role_name)) + .track_changes( + before=( + extract_previous_from_object(Role, "name", "display_name", "description"), + extract_from_object(func=retrieve_role_children, section="previous"), + ), + after=( + extract_current_from_response("name", "display_name", "description"), + extract_from_object(func=retrieve_role_children, section="current"), + ), + ) + ) def partial_update(self, request, *args, **kwargs): # noqa: ARG002 instance = self.get_object() @@ -191,7 +207,7 @@ def partial_update(self, request, *args, **kwargs): # noqa: ARG002 return Response(data=RoleSerializer(instance=role).data, status=HTTP_200_OK) - @audit + @audit_delete(name="Role deleted", object_=role_from_lookup, removed_on_success=True) 
def destroy(self, request, *args, **kwargs): instance = self.get_object() diff --git a/python/api_v2/tests/test_audit/test_role.py b/python/api_v2/tests/test_audit/test_role.py index f5dc6b9522..1566804fab 100644 --- a/python/api_v2/tests/test_audit/test_role.py +++ b/python/api_v2/tests/test_audit/test_role.py @@ -136,7 +136,7 @@ def test_role_update_all_fields_success(self): "current": { "description": "new description", "display_name": "Custom `view cluster configurations` role", - "child": ["View cluster configurations", "Manage cluster Maintenance mode"], + "child": ["Manage cluster Maintenance mode", "View cluster configurations"], }, "previous": { "description": old_description, diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index e4263f9804..a1076163a6 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -21,8 +21,8 @@ from audit.models import AuditObject, AuditObjectType from cm.models import ADCM, Bundle, Cluster, ClusterObject, Host, HostProvider, ServiceComponent from cm.utils import get_obj_type -from django.db.models import Model -from rbac.models import Group, Policy, User +from django.db.models import Model, Prefetch +from rbac.models import Group, Policy, Role, User from rest_framework.response import Response # object retrievers @@ -79,6 +79,7 @@ def get_name(self, id_: str | int) -> str | None: create_audit_user_object = IDBasedAuditObjectCreator(model=User, name_field="username") create_audit_group_object = IDBasedAuditObjectCreator(model=Group) create_audit_policy_object = IDBasedAuditObjectCreator(model=Policy) +create_audit_role_object = IDBasedAuditObjectCreator(model=Role) bundle_from_lookup = GeneralAuditObjectRetriever( @@ -143,6 +144,12 @@ def get_name(self, id_: str | int) -> str | None: policy_from_response = _extract_policy_from(extract_id=ExtractID(field="id").from_response) policy_from_lookup = _extract_policy_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) 
+_extract_role_from = partial( + GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.ROLE, create_new=create_audit_role_object +) +role_from_response = _extract_role_from(extract_id=ExtractID(field="id").from_response) +role_from_lookup = _extract_role_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) + def adcm_audit_object( context: "OperationAuditContext", # noqa: ARG001 @@ -249,6 +256,27 @@ def update_policy_name( instance.save(update_fields=["object_name"]) +def update_role_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Response | None, + exception: Exception | None, +) -> None: + _ = call_arguments, result, exception + + if not context.object: + return + + instance = context.object + + new_name = Role.objects.values_list("name", flat=True).filter(id=instance.object_id).first() + if not new_name or instance.object_name == new_name: + return + + instance.object_name = new_name + instance.save(update_fields=["object_name"]) + + # hook helpers / special functions @@ -296,6 +324,14 @@ def retrieve_policy_role_object_group(id_: int) -> dict: } +def retrieve_role_children(id_: int) -> dict: + prefetch_child_roles = Prefetch("child", queryset=Role.objects.only("display_name")) + if (role := Role.objects.prefetch_related(prefetch_child_roles).filter(pk=id_).only("id").first()) is None: + return {} + + return {"child": sorted(child_role.display_name for child_role in role.child.all())} + + # name changers From 089e1610f21c35d2b28fb416b751c3fd6f4b3444 Mon Sep 17 00:00:00 2001 From: astarovo Date: Thu, 4 Jul 2024 08:52:02 +0300 Subject: [PATCH 15/98] ADCM-5708: Rework audit for Prototype views --- python/api_v2/prototype/views.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/python/api_v2/prototype/views.py b/python/api_v2/prototype/views.py index 0421f25d4d..3029b33a82 100644 --- a/python/api_v2/prototype/views.py +++ b/python/api_v2/prototype/views.py @@ -10,13 +10,14 @@ # See 
the License for the specific language governing permissions and # limitations under the License. -from adcm.permissions import VIEW_CLUSTER_PERM, DjangoModelPermissionsAudit +from adcm.permissions import VIEW_CLUSTER_PERM from adcm.serializers import EmptySerializer -from audit.utils import audit +from audit.alt.api import audit_update from cm.models import ObjectType, Prototype from django.db.models import QuerySet from drf_spectacular.utils import extend_schema, extend_schema_view from rest_framework.decorators import action +from rest_framework.permissions import DjangoModelPermissions from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import HTTP_200_OK @@ -28,6 +29,7 @@ PrototypeVersionsSerializer, ) from api_v2.prototype.utils import accept_license +from api_v2.utils.audit import bundle_from_lookup from api_v2.views import ADCMReadOnlyModelViewSet @@ -41,7 +43,7 @@ ) class PrototypeViewSet(ADCMReadOnlyModelViewSet): queryset = Prototype.objects.exclude(type="adcm").select_related("bundle").order_by("name") - permission_classes = [DjangoModelPermissionsAudit] + permission_classes = [DjangoModelPermissions] permission_required = [VIEW_CLUSTER_PERM] filterset_class = PrototypeFilter @@ -69,7 +71,7 @@ def versions(self, request): # noqa: ARG001, ARG002 description="Accept prototype license.", responses={200: None, 404: ErrorSerializer, 409: ErrorSerializer}, ) - @audit + @audit_update(name="Bundle license accepted", object_=bundle_from_lookup) @action(methods=["post"], detail=True, url_path="license/accept", url_name="accept-license") def accept(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG001, ARG002 prototype = self.get_object() From 57a15903399747c061e43c296aeb0bb704becb4c Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 5 Jul 2024 11:23:32 +0000 Subject: [PATCH 16/98] ADCM-5707: Rework audit for service and component views --- python/api_v2/component/views.py | 14 
+++-- python/api_v2/service/permissions.py | 2 - python/api_v2/service/views.py | 29 +++++++++-- python/api_v2/utils/audit.py | 77 +++++++++++++++++++++++++--- 4 files changed, 103 insertions(+), 19 deletions(-) diff --git a/python/api_v2/component/views.py b/python/api_v2/component/views.py index adb22123a2..d1dfe187da 100644 --- a/python/api_v2/component/views.py +++ b/python/api_v2/component/views.py @@ -21,15 +21,18 @@ check_custom_perm, get_object_for_user, ) -from audit.utils import audit +from audit.alt.api import audit_update +from audit.alt.hooks import extract_current_from_response, extract_previous_from_object from cm.errors import AdcmEx from cm.models import Cluster, ClusterObject, Host, ServiceComponent from cm.services.maintenance_mode import get_maintenance_mode_response from cm.services.status.notify import update_mm_objects +from django.db.models import F from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin from rest_framework.decorators import action from rest_framework.mixins import ListModelMixin +from rest_framework.permissions import DjangoModelPermissions from rest_framework.request import Request from rest_framework.response import Response from rest_framework.status import ( @@ -72,7 +75,7 @@ audit_host_group_config_viewset, ) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet -from api_v2.utils.audit import parent_component_from_lookup +from api_v2.utils.audit import component_from_lookup, parent_component_from_lookup from api_v2.views import ( ADCMGenericViewSet, ADCMReadOnlyModelViewSet, @@ -149,7 +152,7 @@ ) class ComponentViewSet(PermissionListMixin, ConfigSchemaMixin, ObjectWithStatusViewMixin, ADCMReadOnlyModelViewSet): queryset = ServiceComponent.objects.select_related("cluster", "service").order_by("pk") - permission_classes = [DjangoModelPermissionsAudit] + permission_classes = [DjangoModelPermissions] 
permission_required = [VIEW_COMPONENT_PERM] filterset_class = ComponentFilter retrieve_status_map_actions = ("statuses", "list") @@ -173,7 +176,10 @@ def get_serializer_class(self): return ComponentSerializer - @audit + @audit_update(name="Component updated", object_=component_from_lookup).track_changes( + before=extract_previous_from_object(model=ServiceComponent, maintenance_mode=F("_maintenance_mode")), + after=extract_current_from_response("maintenance_mode"), + ) @update_mm_objects @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 diff --git a/python/api_v2/service/permissions.py b/python/api_v2/service/permissions.py index e9f76f5e90..5abfd008e0 100644 --- a/python/api_v2/service/permissions.py +++ b/python/api_v2/service/permissions.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from audit.utils import audit from rest_framework.permissions import DjangoObjectPermissions @@ -25,7 +24,6 @@ class ServicePermissions(DjangoObjectPermissions): "DELETE": ["%(app_label)s.delete_%(model_name)s"], } - @audit def has_permission(self, request, view) -> bool: if view.action in {"maintenance_mode", "destroy"}: return True diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index 7666e83ea2..685afb413a 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -21,12 +21,17 @@ get_object_for_user, ) from audit.alt.api import audit_update, audit_view -from audit.utils import audit +from audit.alt.hooks import ( + adjust_denied_on_404_result, + extract_current_from_response, + extract_previous_from_object, +) from cm.errors import AdcmEx from cm.models import Cluster, ClusterObject from cm.services.maintenance_mode import get_maintenance_mode_response from cm.services.service import delete_service_from_api from cm.services.status.notify import update_mm_objects +from django.db.models import F from django_filters.rest_framework.backends import DjangoFilterBackend from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin @@ -88,7 +93,14 @@ bulk_add_services_to_cluster, validate_service_prototypes, ) -from api_v2.utils.audit import parent_service_from_lookup +from api_v2.utils.audit import ( + parent_cluster_from_lookup, + parent_service_from_lookup, + service_does_exist, + service_from_lookup, + set_service_name_from_object, + set_service_names_from_request, +) from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -192,7 +204,9 @@ def get_serializer_class(self): return ServiceRetrieveSerializer - @audit + @audit_update(name="{service_names} service(s) added", object_=parent_cluster_from_lookup).attach_hooks( + pre_call=set_service_names_from_request + ) def create(self, request: Request, *args, **kwargs): # noqa: 
ARG002 cluster = get_object_for_user( user=request.user, perms=VIEW_CLUSTER_PERM, klass=Cluster, pk=kwargs["cluster_pk"] @@ -223,12 +237,17 @@ def create(self, request: Request, *args, **kwargs): # noqa: ARG002 data=ServiceRetrieveSerializer(instance=added_services[0], context=context).data, ) - @audit + @audit_update(name="{service_name} service removed", object_=parent_cluster_from_lookup).attach_hooks( + pre_call=set_service_name_from_object, on_collect=adjust_denied_on_404_result(service_does_exist) + ) def destroy(self, request: Request, *args, **kwargs): # noqa: ARG002 instance = self.get_object() return delete_service_from_api(service=instance) - @audit + @audit_update(name="Service updated", object_=service_from_lookup).track_changes( + before=extract_previous_from_object(model=ClusterObject, maintenance_mode=F("_maintenance_mode")), + after=extract_current_from_response("maintenance_mode"), + ) @update_mm_objects @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) def maintenance_mode(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index 620cbed256..d2b0ba4d29 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -13,15 +13,18 @@ from contextlib import suppress from dataclasses import dataclass from functools import partial +from typing import Any import json from audit.alt.core import AuditedCallArguments, IDBasedAuditObjectCreator, OperationAuditContext, Result from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType -from cm.models import ADCM, Bundle, Cluster, ClusterObject, Host, HostProvider, ServiceComponent +from cm.models import ADCM, Bundle, Cluster, ClusterObject, Host, HostProvider, Prototype, ServiceComponent from cm.utils import get_obj_type +from 
django.core.handlers.wsgi import WSGIRequest from django.db.models import Model, Prefetch +from django.http.request import RawPostDataException from rbac.models import Group, Policy, Role, User from rest_framework.response import Response @@ -101,6 +104,7 @@ def get_name(self, id_: str | int) -> str | None: create_new=ServiceAuditObjectCreator(model=ClusterObject), ) parent_service_from_lookup = _extract_service_from(extract_id=ExtractID(field="service_pk").from_lookup_kwargs) +service_from_lookup = _extract_service_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) _extract_component_from = partial( GeneralAuditObjectRetriever, @@ -108,6 +112,7 @@ def get_name(self, id_: str | int) -> str | None: create_new=ComponentAuditObjectCreator(model=ServiceComponent), ) parent_component_from_lookup = _extract_component_from(extract_id=ExtractID(field="component_pk").from_lookup_kwargs) +component_from_lookup = _extract_component_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) _extract_hostprovider_from = partial( GeneralAuditObjectRetriever, @@ -281,6 +286,18 @@ def update_role_name( # hook helpers / special functions +def _retrieve_request_body(request: WSGIRequest) -> Any | None: + if not request: + return None + + # request's body can be read only once + body = None + with suppress(AttributeError, json.JSONDecodeError, RawPostDataException): + body = json.loads(request.body) + + return body + + def object_does_exist(hook: AuditHook, model: type[Model], id_field: str = "pk") -> bool: id_ = hook.call_arguments.get(id_field) if not id_: @@ -294,6 +311,10 @@ def nested_host_does_exist(hook: AuditHook) -> bool: return object_does_exist(hook=hook, model=Host) +def service_does_exist(hook: AuditHook) -> bool: + return object_does_exist(hook=hook, model=ClusterObject) + + def retrieve_user_password_groups(id_: int) -> dict: if (user := User.objects.filter(pk=id_).first()) is None: return {} @@ -339,13 +360,7 @@ def retrieve_role_children(id_: int) -> dict: 
def set_service_name_from_object(
    context: OperationAuditContext,
    call_arguments: AuditedCallArguments,
    result: Response | None,  # noqa: ARG001
    exception: Exception | None,  # noqa: ARG001
) -> None:
    """Fill the `{service_name}` placeholder in the audit operation name.

    The name is the display name of the service resolved from the `pk` lookup argument;
    falls back to an empty string when the service does not (or no longer does) exist.
    """
    # NOTE: `values_list()` makes `select_related`/`only` no-ops, and
    # "prototype__display_name" is not a relation, so it was never a valid
    # `select_related` target anyway — query for the single value directly.
    service_name = (
        ClusterObject.objects.filter(pk=call_arguments.get("pk"))
        .values_list("prototype__display_name", flat=True)
        .first()
        or ""
    )

    context.name = context.name.format(service_name=service_name).strip()
remove_concern_from_object, - update_hierarchy_issues, - update_issue_after_deleting, ) from cm.models import ( Cluster, ClusterObject, + ConcernCause, GroupConfig, Host, HostComponent, @@ -44,8 +43,12 @@ Prototype, ServiceComponent, ) +from cm.services.cluster import retrieve_clusters_topology +from cm.services.concern import delete_issue +from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import send_host_component_map_update_event +from core.types import ADCMCoreType, CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType from django.db.models import QuerySet from django.db.transaction import atomic, on_commit @@ -280,10 +283,16 @@ def _save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: HostComponent.objects.filter(cluster_id=mapping_data.cluster.id).delete() HostComponent.objects.bulk_create(objs=mapping_objects) - update_hierarchy_issues(obj=mapping_data.orm_objects["cluster"]) - for provider_id in {host.provider_id for host in mapping_data.hosts.values()}: - update_hierarchy_issues(obj=mapping_data.orm_objects["providers"][provider_id]) - update_issue_after_deleting() + delete_issue( + owner=CoreObjectDescriptor(id=mapping_data.cluster.id, type=ADCMCoreType.CLUSTER), + cause=ConcernCause.HOSTCOMPONENT, + ) + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((mapping_data.cluster.id,)))) + + # update_hierarchy_issues(obj=mapping_data.orm_objects["cluster"]) + # for provider_id in {host.provider_id for host in mapping_data.hosts.values()}: + # update_hierarchy_issues(obj=mapping_data.orm_objects["providers"][provider_id]) + # update_issue_after_deleting() _handle_mapping_policies(mapping_data=mapping_data) send_host_component_map_update_event(cluster=mapping_data.orm_objects["cluster"]) diff --git a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml 
b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml new file mode 100644 index 0000000000..ca9013b9db --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml @@ -0,0 +1,68 @@ +- type: cluster + name: all_concerns + version: 3.4 + + # no import for service + import: &import + for_export: + multibind: false + required: true + + config: &required_config + - name: field + type: integer + required: true + + actions: &actions + dummy: + type: job + script_type: ansible + script: ./action.yaml + masking: + +- type: service + name: main + version: 1 + + config: *required_config + + actions: *actions + + components: + single: + constraint: [1] + config: *required_config + free: + actions: *actions + +- type: service + name: required + version: 2 + required: true + +- type: service + name: dummy + version: 4 + + components: + same_dummy: + +- type: service + name: require_dummy_service + version: 3 + + requires: + - service: dummy + + actions: *actions + + components: + silent: + sir: + config: *required_config + +- type: service + name: no_components + version: 5 + + import: *import diff --git a/python/api_v2/tests/bundles/provider_concerns/config.yaml b/python/api_v2/tests/bundles/provider_concerns/config.yaml new file mode 100644 index 0000000000..6db1d64e7f --- /dev/null +++ b/python/api_v2/tests/bundles/provider_concerns/config.yaml @@ -0,0 +1,22 @@ +- type: provider + name: provider_with_concerns + version: 12 + + config: &required_config + - name: field + type: integer + required: true + + actions: &actions + dummy: + type: job + script_type: ansible + script: ./action.yaml + masking: + +- type: host + name: hohoho + version: 2 + + config: *required_config + actions: *actions diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 213bfe7748..6461278f29 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -10,8 +10,16 @@ # See the License for the 
specific language governing permissions and # limitations under the License. +from pathlib import Path +from typing import Iterable + from cm.models import ( Action, + ADCMEntity, + Cluster, + ConcernCause, + ConcernItem, + Host, JobLog, ObjectType, Prototype, @@ -345,3 +353,222 @@ def test_adcm_5677_hc_issue_on_link_host_to_cluster_with_plus_constraint(self): # not mapped host has no concerns response: Response = self.client.v2[host_2].get() self.assertEqual(len(response.json()["concerns"]), 0) + + +class TestConcernRedistribution(BaseAPITestCase): + def setUp(self) -> None: + super().setUp() + + bundles_dir = Path(__file__).parent / "bundles" + + self.cluster = self.add_cluster( + bundle=self.add_bundle(bundles_dir / "cluster_all_concerns"), name="With Concerns" + ) + + self.provider = self.add_provider( + bundle=self.add_bundle(bundles_dir / "provider_concerns"), name="Concerned HP" + ) + + self.control_cluster = self.add_cluster(bundle=self.cluster.prototype.bundle, name="Control Cluster") + self.control_provider = self.add_provider(bundle=self.provider.prototype.bundle, name="Control HP") + self.control_host = self.add_host(provider=self.control_provider, fqdn="control_host") + self.control_service = self.add_services_to_cluster(["main"], cluster=self.control_cluster).get() + self.control_component = self.control_service.servicecomponent_set.get(prototype__name="single") + + self.control_concerns = { + object_: tuple(object_.concerns.all()) + for object_ in ( + self.control_cluster, + self.control_service, + self.control_component, + self.control_provider, + self.control_host, + ) + } + + def repr_concerns(self, concerns: Iterable[ConcernItem]) -> str: + return "\n".join( + f" {i}. 
{rec}" + for i, rec in enumerate(sorted(f"{concern.cause} from {concern.owner}" for concern in concerns), start=1) + ) + + def check_concerns(self, object_: ADCMEntity, concerns: Iterable[ConcernItem]) -> None: + expected_concerns = tuple(concerns) + object_concerns = tuple(object_.concerns.all()) + + actual_amount = len(object_concerns) + expected_amount = len(expected_concerns) + + # avoid calculation of message for success passes + if actual_amount != expected_amount: + message = ( + "Incorrect amount of records.\n" + f"Actual:\n{self.repr_concerns(object_concerns)}\n" + f"Expected:\n{self.repr_concerns(expected_concerns)}\n" + ) + self.assertEqual(actual_amount, expected_amount, message) + + for concern in expected_concerns: + self.assertIn(concern, object_concerns) + + def check_concerns_of_control_objects(self) -> None: + for object_, expected_concerns in self.control_concerns.items(): + self.check_concerns(object_, expected_concerns) + + def change_mapping_via_api(self, entries: Iterable[tuple[Host, ServiceComponent]]) -> None: + response = self.client.v2[self.cluster, "mapping"].post( + data=[{"hostId": host.id, "componentId": component.id} for host, component in entries] + ) + self.assertEqual(response.status_code, HTTP_201_CREATED) + + def test_concerns_swap_on_mapping_changes(self) -> None: + # prepare + host_1, host_2, unmapped_host = ( + self.add_host(self.provider, fqdn=f"host_{i}", cluster=self.cluster) for i in range(3) + ) + unbound_host = self.add_host(self.provider, fqdn="free-host") + self.change_configuration(host_2, config_diff={"field": 4}) + + main_s = self.add_services_to_cluster(["main"], cluster=self.cluster).get() + single_c = main_s.servicecomponent_set.get(prototype__name="single") + free_c = main_s.servicecomponent_set.get(prototype__name="free") + + require_dummy_s = self.add_services_to_cluster(["require_dummy_service"], cluster=self.cluster).get() + silent_c = require_dummy_s.servicecomponent_set.get(prototype__name="silent") + 
sir_c = require_dummy_s.servicecomponent_set.get(prototype__name="sir") + + # have to add it to proceed to hc set + dummy_s = self.add_services_to_cluster(["dummy"], cluster=self.cluster).get() + dummy_c = dummy_s.servicecomponent_set.get() + + # component-less service + no_components_s = self.add_services_to_cluster(["no_components"], cluster=self.cluster).get() + + # find own concerns + provider_config_con = self.provider.get_own_issue(ConcernCause.CONFIG) + host_1_config_con = host_1.get_own_issue(ConcernCause.CONFIG) + unbound_host_con = unbound_host.get_own_issue(ConcernCause.CONFIG) + unmapped_host_con = unmapped_host.get_own_issue(ConcernCause.CONFIG) + + main_service_own_con = main_s.get_own_issue(ConcernCause.CONFIG) + + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + main_and_single_cons = (main_service_own_con, single_c.get_own_issue(ConcernCause.CONFIG)) + sir_c_conn = sir_c.get_own_issue(ConcernCause.CONFIG) + no_components_conn = no_components_s.get_own_issue(ConcernCause.IMPORT) + + with self.subTest("Pre-Mapping Concerns Distribution"): + self.check_concerns_of_control_objects() + + self.check_concerns(unbound_host, concerns=(provider_config_con, unbound_host_con)) + self.check_concerns(host_1, concerns=(provider_config_con, host_1_config_con)) + self.check_concerns(host_2, concerns=(provider_config_con,)) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + + self.check_concerns( + self.cluster, concerns=(*cluster_own_cons, *main_and_single_cons, sir_c_conn, no_components_conn) + ) + + self.check_concerns(main_s, concerns=(*cluster_own_cons, *main_and_single_cons)) + self.check_concerns(require_dummy_s, concerns=(*cluster_own_cons, sir_c_conn)) + self.check_concerns(dummy_s, concerns=cluster_own_cons) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_conn)) + + self.check_concerns(single_c, 
concerns=(*cluster_own_cons, *main_and_single_cons)) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con)) + self.check_concerns(silent_c, concerns=cluster_own_cons) + self.check_concerns(sir_c, concerns=(*cluster_own_cons, sir_c_conn)) + + with self.subTest("Fist Mapping Set"): + hc_concern = self.cluster.get_own_issue(ConcernCause.HOSTCOMPONENT) + self.assertIsNotNone(hc_concern) + + self.change_mapping_via_api( + entries=( + (host_1, single_c), + (host_1, silent_c), + (host_2, free_c), + (host_2, silent_c), + (host_2, sir_c), + (host_1, dummy_c), + ), + ) + + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + self.assertNotIn(hc_concern, cluster_own_cons) + + self.check_concerns(unbound_host, concerns=(provider_config_con, unbound_host_con)) + self.check_concerns( + host_1, concerns=(provider_config_con, host_1_config_con, *cluster_own_cons, *main_and_single_cons) + ) + self.check_concerns( + host_2, concerns=(provider_config_con, *cluster_own_cons, main_service_own_con, sir_c_conn) + ) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + + self.check_concerns( + self.cluster, + concerns=( + *cluster_own_cons, + *main_and_single_cons, + sir_c_conn, + no_components_conn, + provider_config_con, + host_1_config_con, + ), + ) + + self.check_concerns( + main_s, concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con, host_1_config_con) + ) + self.check_concerns( + require_dummy_s, concerns=(*cluster_own_cons, sir_c_conn, provider_config_con, host_1_config_con) + ) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_conn)) + + self.check_concerns( + single_c, concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con, host_1_config_con) + ) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, provider_config_con)) + self.check_concerns(silent_c, 
concerns=(*cluster_own_cons, host_1_config_con, provider_config_con)) + self.check_concerns(sir_c, concerns=(*cluster_own_cons, sir_c_conn, provider_config_con)) + + self.check_concerns_of_control_objects() + + with self.subTest("Second Mapping Set"): + self.change_mapping_via_api( + entries=((host_2, single_c), (host_2, free_c), (host_1, silent_c), (host_1, dummy_c)) + ) + + self.check_concerns(host_1, concerns=(provider_config_con, host_1_config_con, *cluster_own_cons)) + self.check_concerns(host_2, concerns=(provider_config_con, *cluster_own_cons, *main_and_single_cons)) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + + self.check_concerns( + self.cluster, + concerns=( + *cluster_own_cons, + *main_and_single_cons, + sir_c_conn, + no_components_conn, + provider_config_con, + host_1_config_con, + ), + ) + + self.check_concerns(main_s, concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con)) + self.check_concerns( + require_dummy_s, concerns=(*cluster_own_cons, sir_c_conn, provider_config_con, host_1_config_con) + ) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_conn)) + + self.check_concerns(single_c, concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con)) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, provider_config_con)) + self.check_concerns(silent_c, concerns=(*cluster_own_cons, host_1_config_con, provider_config_con)) + self.check_concerns(sir_c, concerns=(*cluster_own_cons, sir_c_conn)) + + self.check_concerns_of_control_objects() diff --git a/python/cm/models.py b/python/cm/models.py index c6bf298072..7f284e1882 100644 --- a/python/cm/models.py +++ b/python/cm/models.py @@ -27,6 +27,7 @@ from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist from django.db import models, transaction +from django.db.models import QuerySet from django.db.models.signals 
import post_delete from django.dispatch import receiver @@ -1644,6 +1645,17 @@ def related_objects(self) -> Iterable[ADCMEntity]: self.host_entities.order_by("id"), ) + @property + def related_querysets(self) -> Iterable[QuerySet]: + return ( + self.adcm_entities, + self.cluster_entities, + self.clusterobject_entities, + self.servicecomponent_entities, + self.hostprovider_entities, + self.host_entities, + ) + class ADCMEntityStatus(models.TextChoices): UP = "up", "up" diff --git a/python/cm/services/concern/__init__.py b/python/cm/services/concern/__init__.py index f3e59d81d8..31267ac0ad 100644 --- a/python/cm/services/concern/__init__.py +++ b/python/cm/services/concern/__init__.py @@ -10,3 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. +from ._operaitons import delete_issue + +__all__ = ["delete_issue"] diff --git a/python/cm/services/concern/_operaitons.py b/python/cm/services/concern/_operaitons.py new file mode 100644 index 0000000000..1a59a10592 --- /dev/null +++ b/python/cm/services/concern/_operaitons.py @@ -0,0 +1,22 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
def delete_issue(owner: CoreObjectDescriptor, cause: ConcernCause) -> None:
    """Remove the ISSUE-type concern with the given cause owned by `owner`, if any exists."""
    owner_model = core_type_to_model(core_type=owner.type)
    matching_issues = ConcernItem.objects.filter(
        owner_id=owner.id,
        owner_type=ContentType.objects.get_for_model(owner_model),
        cause=cause,
        type=ConcernType.ISSUE,
    )
    matching_issues.delete()
def redistribute_issues_and_flags(topology: ClusterTopology) -> None:
    """Recalculate which issue/flag concerns are linked to each object of the given cluster topology."""
    objects_in_topology: TopologyObjectMap = {
        ADCMCoreType.CLUSTER: (topology.cluster_id,),
        ADCMCoreType.SERVICE: tuple(topology.services),
        ADCMCoreType.COMPONENT: tuple(topology.component_ids),
        ADCMCoreType.HOST: tuple(topology.hosts),
    }

    # group the topology's hosts by their provider
    hosts_by_provider: ProviderHostMap = defaultdict(set)
    host_provider_pairs = Host.objects.values_list("id", "provider_id").filter(
        id__in=objects_in_topology[ADCMCoreType.HOST]
    )
    for host_id, provider_id in host_provider_pairs:
        hosts_by_provider[provider_id].add(host_id)

    # Step #1. Collect own concerns of all related objects
    own_concerns = _retrieve_concerns_of_objects_in_topology(
        topology_objects=objects_in_topology, provider_host_mapping=hosts_by_provider
    )
    if not own_concerns:
        # nothing to redistribute, expected that no links will be found too
        return

    # Step #2. Calculate the new object -> concern relations
    links: AffectedObjectConcernMap = _calculate_concerns_distribution_for_topology(
        topology=topology, objects_concerns=own_concerns
    )

    # Step #3. Strip concerns from objects that are in maintenance mode
    links = _drop_concerns_from_objects_in_mm(
        topology=topology, concern_links=links, provider_host_map=hosts_by_provider
    )

    # Step #4. Persist the calculated relations
    _relink_concerns_to_objects_in_db(
        concern_links=links,
        topology_objects=objects_in_topology,
        hosts_existing_concerns=own_concerns.get(ADCMCoreType.HOST, {}),
    )
def _calculate_concerns_distribution_for_topology(
    topology: ClusterTopology, objects_concerns: OwnObjectConcernMap
) -> AffectedObjectConcernMap:
    """Spread each object's own concerns across the hierarchy defined by the topology.

    Cluster concerns go down to services, components and mapped hosts;
    host/component/service concerns are pushed up to the objects above them.
    """
    links: dict[ADCMCoreType, dict[int, set[int]]] = defaultdict(lambda: defaultdict(set))

    cluster_concerns: set[int] = objects_concerns.get(ADCMCoreType.CLUSTER, {}).get(topology.cluster_id, set())
    # an independent copy, so the in-place updates below don't mutate the source mapping
    links[ADCMCoreType.CLUSTER][topology.cluster_id] = set(cluster_concerns)

    host_concern_map = objects_concerns.get(ADCMCoreType.HOST, {})

    for service_id, service_topology in topology.services.items():
        service_concerns = objects_concerns.get(ADCMCoreType.SERVICE, {}).get(service_id, set())
        links[ADCMCoreType.SERVICE][service_id] |= cluster_concerns | service_concerns

        for component_id, component_topology in service_topology.components.items():
            component_concerns = objects_concerns.get(ADCMCoreType.COMPONENT, {}).get(component_id, set())

            concerns_of_mapped_hosts = set()
            for host_id in set(component_topology.hosts):
                host_concerns = host_concern_map.get(host_id, set())
                concerns_of_mapped_hosts |= host_concerns

                links[ADCMCoreType.HOST][host_id] |= (
                    cluster_concerns | service_concerns | component_concerns | host_concerns
                )

            # "push" concerns up a "tree"
            links[ADCMCoreType.COMPONENT][component_id] = (
                cluster_concerns | service_concerns | component_concerns | concerns_of_mapped_hosts
            )
            links[ADCMCoreType.SERVICE][service_id] |= component_concerns | concerns_of_mapped_hosts

        links[ADCMCoreType.CLUSTER][topology.cluster_id] |= links[ADCMCoreType.SERVICE][service_id]

    return links
def _drop_concerns_from_objects_in_mm(
    topology: ClusterTopology, concern_links: AffectedObjectConcernMap, provider_host_map: ProviderHostMap
) -> AffectedObjectConcernMap:
    """Unlink concerns owned by objects in maintenance mode from everything except those owners.

    Mutates `concern_links` in place and returns it.
    An object in MM keeps its own concerns (hosts also keep their provider's ones),
    but those concerns stop propagating to other objects of the topology.
    """
    mm_of_objects = calculate_maintenance_mode_for_cluster_objects(
        topology=topology,
        own_maintenance_mode=retrieve_clusters_objects_maintenance_mode(cluster_ids=(topology.cluster_id,)),
    )

    hosts_in_mm = set(_keep_objects_in_mm(mm_of_objects.hosts))

    objects_in_mm_own_concerns: OwnObjectConcernMap = _get_own_concerns_of_objects(
        with_types=(ConcernType.ISSUE, ConcernType.FLAG),
        hosts=hosts_in_mm,
        services=_keep_objects_in_mm(mm_of_objects.services),
        components=_keep_objects_in_mm(mm_of_objects.components),
    )

    if not objects_in_mm_own_concerns:
        return concern_links

    # todo check this logic out, because:
    #  1. it's strange AS IS (or unclear)
    #  2. probably unmapped hosts shouldn't participate in counting too
    hostprovider_concerns_to_unlink = set()
    hostproviders_to_exclude = deque()
    # fix: iterate (provider, hosts) pairs via `.items()` — iterating the dict directly
    # yields bare keys and fails to unpack into two names
    for hostprovider_id, hosts in provider_host_map.items():
        # If all hosts of the provider are in MM, then HP concerns should be removed
        # from all objects that aren't hosts.
        # If at least one host is not in MM, concerns should be passed in a regular way.
        # fix: subset check instead of equality — `hosts_in_mm` may also contain
        # MM hosts that belong to other providers
        if hosts <= hosts_in_mm:
            hostproviders_to_exclude.append(hostprovider_id)

    if hostproviders_to_exclude:
        # fix: `_get_own_concerns_of_objects` returns {owner_id: {concern ids}} per type;
        # union the concern id sets rather than absorbing the owner ids (the dict keys)
        hostprovider_own_concerns = _get_own_concerns_of_objects(
            with_types=(ConcernType.ISSUE, ConcernType.FLAG), hostproviders=hostproviders_to_exclude
        ).get(ADCMCoreType.HOSTPROVIDER, {})
        hostprovider_concerns_to_unlink |= set(chain.from_iterable(hostprovider_own_concerns.values()))

    own_concerns_to_keep: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set))
    concerns_to_unlink: set[int] = copy(hostprovider_concerns_to_unlink)

    for core_type, concern_dict in objects_in_mm_own_concerns.items():
        # hosts in MM keep their provider's concerns; other object types don't
        hostprovider_concerns_to_keep = set() if core_type != ADCMCoreType.HOST else hostprovider_concerns_to_unlink
        for object_id, concerns in concern_dict.items():
            own_concerns_to_keep[core_type][object_id] = concerns | hostprovider_concerns_to_keep
            concerns_to_unlink |= concerns

    for core_type, concern_dict in concern_links.items():
        for object_id, concerns in concern_dict.items():
            concern_dict[object_id] = concerns - (concerns_to_unlink - own_concerns_to_keep[core_type][object_id])

    return concern_links
def _get_own_concerns_of_objects(
    with_types: Iterable[ConcernType],
    *,
    clusters: Iterable[ClusterID] = (),
    services: Iterable[ServiceID] = (),
    components: Iterable[ComponentID] = (),
    hosts: Iterable[HostID] = (),
    hostproviders: Iterable[HostProviderID] = (),
) -> dict[ADCMCoreType, dict[ObjectID, set[int]]]:
    """Fetch ids of concerns owned by the given objects, grouped by owner core type and owner id.

    Note: a Host entry contains only the host's own concerns here;
    merging in HostProvider concerns is up to the caller.
    """
    owner_matches = (
        Q(owner_id__in=clusters, owner_type=ContentType.objects.get_for_model(Cluster))
        | Q(owner_id__in=hosts, owner_type=ContentType.objects.get_for_model(Host))
        | Q(owner_id__in=services, owner_type=ContentType.objects.get_for_model(ClusterObject))
        | Q(owner_id__in=components, owner_type=ContentType.objects.get_for_model(ServiceComponent))
        | Q(owner_id__in=hostproviders, owner_type=ContentType.objects.get_for_model(HostProvider))
    )
    concern_rows = ConcernItem.objects.select_related("owner_type").filter(type__in=with_types).filter(owner_matches)

    grouped: dict[ADCMCoreType, dict[int, set[int]]] = defaultdict(lambda: defaultdict(set))
    for concern in concern_rows.all():
        concern: ConcernItem
        grouped[model_name_to_core_type(concern.owner_type.model)][concern.owner_id].add(concern.id)

    return grouped
+ for qs in instance.related_querysets: + for object_id, object_type in qs.values_list("id", "prototype__type"): + on_commit( + func=partial( + send_concern_delete_event, + object_id=object_id, + object_type=object_type, + concern_id=instance.pk, + ) ) - ) diff --git a/python/core/types.py b/python/core/types.py index 5465adead9..c6eb58bf13 100644 --- a/python/core/types.py +++ b/python/core/types.py @@ -27,6 +27,7 @@ TaskID: TypeAlias = int ConfigID: TypeAlias = int +ConcernID: TypeAlias = int HostName: TypeAlias = str ServiceName: TypeAlias = str From 8d5bce1028c532fc1228df69ae5181b7d9072de8 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 10 Jul 2024 11:43:57 +0000 Subject: [PATCH 18/98] ADCM-5768 Apply new issues/flag distribution approach to MM change via API --- .../bundles/cluster_all_concerns/config.yaml | 1 + python/api_v2/tests/test_concerns.py | 242 ++++++++++++++++++ python/cm/services/concern/distribution.py | 25 +- python/cm/services/maintenance_mode.py | 25 +- python/core/cluster/types.py | 11 + 5 files changed, 283 insertions(+), 21 deletions(-) diff --git a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml index ca9013b9db..7fb3f692b3 100644 --- a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml +++ b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml @@ -1,6 +1,7 @@ - type: cluster name: all_concerns version: 3.4 + allow_maintenance_mode: true # no import for service import: &import diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 6461278f29..3428fd950d 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -17,6 +17,7 @@ Action, ADCMEntity, Cluster, + ClusterObject, ConcernCause, ConcernItem, Host, @@ -28,6 +29,7 @@ ) from cm.services.concern.messages import ConcernMessage from cm.tests.mocks.task_runner import RunTaskMock +from core.cluster.types import 
ObjectMaintenanceModeState as MM # noqa: N814 from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -421,6 +423,18 @@ def change_mapping_via_api(self, entries: Iterable[tuple[Host, ServiceComponent] ) self.assertEqual(response.status_code, HTTP_201_CREATED) + def change_mm_via_api(self, mm_value: MM, *objects: ClusterObject | ServiceComponent | Host) -> None: + for object_ in objects: + object_endpoint = ( + self.client.v2[object_] + if not isinstance(object_, Host) + else self.client.v2[object_.cluster, "hosts", object_] + ) + self.assertEqual( + (object_endpoint / "maintenance-mode").post(data={"maintenanceMode": mm_value.value}).status_code, + HTTP_200_OK, + ) + def test_concerns_swap_on_mapping_changes(self) -> None: # prepare host_1, host_2, unmapped_host = ( @@ -572,3 +586,231 @@ def test_concerns_swap_on_mapping_changes(self) -> None: self.check_concerns(sir_c, concerns=(*cluster_own_cons, sir_c_conn)) self.check_concerns_of_control_objects() + + def test_concerns_distribution_mm(self) -> None: + # prepare + second_provider = self.add_provider(bundle=self.provider.prototype.bundle, name="No Concerns HP") + host_no_concerns = self.add_host(provider=second_provider, fqdn="no-concerns-host", cluster=self.cluster) + + host_1, host_2, unmapped_host = ( + self.add_host(self.provider, fqdn=f"host-{i}", cluster=self.cluster) for i in range(3) + ) + + for object_ in (host_no_concerns, host_2): + self.change_configuration(object_, config_diff={"field": 1}) + + main_s, no_components_s = ( + self.add_services_to_cluster(["main", "no_components"], cluster=self.cluster) + .order_by("prototype__name") + .all() + ) + single_c = main_s.servicecomponent_set.get(prototype__name="single") + free_c = main_s.servicecomponent_set.get(prototype__name="free") + + # find own concerns + provider_config_con = self.provider.get_own_issue(ConcernCause.CONFIG) + second_provider_con = 
second_provider.get_own_issue(ConcernCause.CONFIG) + host_1_config_con = host_1.get_own_issue(ConcernCause.CONFIG) + unmapped_host_con = unmapped_host.get_own_issue(ConcernCause.CONFIG) + provider_cons = (provider_config_con, second_provider_con) + all_mapped_hosts_cons = (*provider_cons, host_1_config_con) + + main_service_own_con = main_s.get_own_issue(ConcernCause.CONFIG) + no_components_service_own_con = no_components_s.get_own_issue(ConcernCause.IMPORT) + + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + single_con = single_c.get_own_issue(ConcernCause.CONFIG) + main_and_single_cons = (main_service_own_con, single_con) + + # test + with self.subTest("Unmapped Distribution Turn Service ON"): + self.change_mm_via_api(MM.ON, main_s) + + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, no_components_service_own_con)) + self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con)) + self.check_concerns(single_c, concerns=(*cluster_own_cons, single_con)) + self.check_concerns(free_c, concerns=cluster_own_cons) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns(host_1, concerns=(host_1_config_con, provider_config_con)) + self.check_concerns(host_2, concerns=(provider_config_con,)) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns(host_no_concerns, concerns=(second_provider_con,)) + + self.check_concerns_of_control_objects() + + with self.subTest("Unmapped Distribution Turn Service OFF"): + self.change_mm_via_api(MM.OFF, main_s) + + self.check_concerns( + self.cluster, concerns=(*cluster_own_cons, *main_and_single_cons, no_components_service_own_con) + ) + self.check_concerns(main_s, concerns=(*cluster_own_cons, *main_and_single_cons)) + self.check_concerns(single_c, concerns=(*cluster_own_cons, *main_and_single_cons)) + 
self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con)) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns(host_1, concerns=(host_1_config_con, provider_config_con)) + self.check_concerns(host_2, concerns=(provider_config_con,)) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns(host_no_concerns, concerns=(second_provider_con,)) + + self.check_concerns_of_control_objects() + + self.set_hostcomponent( + cluster=self.cluster, + entries=((host_1, single_c), (host_1, free_c), (host_2, free_c), (host_no_concerns, free_c)), + ) + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + + with self.subTest("Mapped Turn Component ON"): + self.change_mm_via_api(MM.ON, single_c) + + self.check_concerns( + self.cluster, + concerns=( + *cluster_own_cons, + main_service_own_con, + no_components_service_own_con, + *all_mapped_hosts_cons, + ), + ) + self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, *all_mapped_hosts_cons)) + self.check_concerns( + single_c, + concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con, host_1_config_con), + ) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, *all_mapped_hosts_cons)) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns( + host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) + ) + self.check_concerns( + host_2, + concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), + ) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns( + host_no_concerns, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con) + ) + + 
self.check_concerns_of_control_objects() + + with self.subTest("Mapped Turn Host ON"): + self.change_mm_via_api(MM.ON, host_1) + + self.check_concerns( + self.cluster, + concerns=(*cluster_own_cons, main_service_own_con, no_components_service_own_con, *provider_cons), + ) + self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, *provider_cons)) + self.check_concerns( + single_c, + concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con), + ) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, *provider_cons)) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns( + host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) + ) + self.check_concerns( + host_2, + concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), + ) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns( + host_no_concerns, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con) + ) + + self.check_concerns_of_control_objects() + + with self.subTest("Mapped Turn Second Host ON"): + self.change_mm_via_api(MM.ON, host_2) + + self.check_concerns( + self.cluster, + concerns=(*cluster_own_cons, main_service_own_con, no_components_service_own_con, second_provider_con), + ) + self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) + self.check_concerns(single_c, concerns=(*cluster_own_cons, *main_and_single_cons)) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns( + host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) + ) + self.check_concerns( + host_2, + 
concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), + ) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns( + host_no_concerns, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con) + ) + + self.check_concerns_of_control_objects() + + with self.subTest("Mapped Turn Service Without Components ON"): + self.change_mm_via_api(MM.ON, no_components_s) + + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) + self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) + self.check_concerns(single_c, concerns=(*cluster_own_cons, *main_and_single_cons)) + self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns( + host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) + ) + self.check_concerns( + host_2, + concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), + ) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns( + host_no_concerns, + concerns=(*cluster_own_cons, main_service_own_con, second_provider_con), + ) + + self.check_concerns_of_control_objects() + + with self.subTest("Mapped Turn All OFF"): + self.change_mm_via_api(MM.OFF, no_components_s, host_1, host_2, single_c) + + self.check_concerns( + self.cluster, + concerns=( + *cluster_own_cons, + *main_and_single_cons, + no_components_service_own_con, + *all_mapped_hosts_cons, + ), + ) + self.check_concerns(main_s, concerns=(*cluster_own_cons, *main_and_single_cons, *all_mapped_hosts_cons)) + self.check_concerns( + single_c, + concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con, host_1_config_con), + ) + 
self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, *all_mapped_hosts_cons)) + self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + + self.check_concerns( + host_1, concerns=(*cluster_own_cons, *main_and_single_cons, host_1_config_con, provider_config_con) + ) + self.check_concerns( + host_2, + concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), + ) + self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) + self.check_concerns( + host_no_concerns, + concerns=(*cluster_own_cons, main_service_own_con, second_provider_con), + ) + + self.check_concerns_of_control_objects() diff --git a/python/cm/services/concern/distribution.py b/python/cm/services/concern/distribution.py index 93fd080b47..9b9fe8d4d7 100644 --- a/python/cm/services/concern/distribution.py +++ b/python/cm/services/concern/distribution.py @@ -166,21 +166,26 @@ def _drop_concerns_from_objects_in_mm( if not objects_in_mm_own_concerns: return concern_links - # todo check this logic out, because: - # 1. it's strange AS IS (or unclear) - # 2. probably unmapped hosts shouldn't participate in counting too + unmapped_hosts = topology.unmapped_hosts hostprovider_concerns_to_unlink = set() hostproviders_to_exclude = deque() - for hostprovider_id, hosts in provider_host_map: - # If all hosts are in MM, then HP concerns should be removed from all objects that aren't hosts. - # If at least one host is not in MM, concerns should be passed in a regular way. - if hosts == hosts_in_mm: + for hostprovider_id, hosts in provider_host_map.items(): + # If all mapped hosts are in MM, then HP concerns should be removed from all objects that aren't hosts. + # If at least one mapped host is not in MM, concerns should be passed in a regular way. 
+ mapped_hosts = hosts - unmapped_hosts + if mapped_hosts and mapped_hosts.issubset(hosts_in_mm): hostproviders_to_exclude.append(hostprovider_id) if hostproviders_to_exclude: - hostprovider_concerns_to_unlink |= _get_own_concerns_of_objects( - with_types=(ConcernType.ISSUE, ConcernType.FLAG), hostproviders=hostproviders_to_exclude - ).get(ADCMCoreType.HOSTPROVIDER, set()) + hostprovider_concerns_to_unlink |= set( + chain.from_iterable( + _get_own_concerns_of_objects( + with_types=(ConcernType.ISSUE, ConcernType.FLAG), hostproviders=hostproviders_to_exclude + ) + .get(ADCMCoreType.HOSTPROVIDER, {}) + .values() + ) + ) own_concerns_to_keep: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) concerns_to_unlink: set[int] = copy(hostprovider_concerns_to_unlink) diff --git a/python/cm/services/maintenance_mode.py b/python/cm/services/maintenance_mode.py index 82b776b165..05c0e6460b 100644 --- a/python/cm/services/maintenance_mode.py +++ b/python/cm/services/maintenance_mode.py @@ -15,7 +15,6 @@ from rest_framework.serializers import Serializer from rest_framework.status import HTTP_400_BAD_REQUEST, HTTP_409_CONFLICT -from cm.issue import update_hierarchy_issues, update_issue_after_deleting from cm.models import ( Action, ClusterObject, @@ -27,6 +26,8 @@ Prototype, ServiceComponent, ) +from cm.services.cluster import retrieve_clusters_topology +from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.concern.flags import update_hierarchy from cm.services.job.action import ActionRunPayload, run_action from cm.services.status.notify import reset_objects_in_mm @@ -52,16 +53,18 @@ def _change_mm_via_action( def _update_mm_hierarchy_issues(obj: Host | ClusterObject | ServiceComponent) -> None: - if isinstance(obj, Host): - update_hierarchy_issues(obj.provider) - - providers = {host_component.host.provider for host_component in HostComponent.objects.filter(cluster=obj.cluster)} - for provider in providers: - 
update_hierarchy_issues(provider) - - update_hierarchy_issues(obj.cluster) - update_issue_after_deleting() - _update_flags() + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((obj.cluster_id,)))) + + # if isinstance(obj, Host): + # update_hierarchy_issues(obj.provider) + # + # providers = {host_component.host.provider for host_component in HostComponent.objects.filter(cluster=obj.cluster)} + # for provider in providers: + # update_hierarchy_issues(provider) + # + # update_hierarchy_issues(obj.cluster) + # update_issue_after_deleting() + # _update_flags() reset_objects_in_mm() diff --git a/python/core/cluster/types.py b/python/core/cluster/types.py index 7b4fa0368f..458a37e9cd 100644 --- a/python/core/cluster/types.py +++ b/python/core/cluster/types.py @@ -50,6 +50,17 @@ class ClusterTopology(NamedTuple): def component_ids(self) -> Generator[ComponentID, None, None]: return chain.from_iterable(service.components for service in self.services.values()) + @property + def unmapped_hosts(self) -> set[HostID]: + mapped_hosts = chain.from_iterable( + component_topology.hosts + for component_topology in chain.from_iterable( + service.components.values() for service in self.services.values() + ) + ) + + return set(self.hosts).difference(mapped_hosts) + class ObjectMaintenanceModeState(Enum): ON = "on" From b07526c08376a278aa564db0d80d02190fc873f1 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Wed, 10 Jul 2024 11:56:30 +0000 Subject: [PATCH 19/98] ADCM-5709: Rework audit for Profile views --- python/api_v2/profile/views.py | 13 +++++++++++-- python/api_v2/utils/audit.py | 28 ++++++++++++++++++++++++++++ python/audit/alt/hooks.py | 21 +++++++++++++++++++++ 3 files changed, 60 insertions(+), 2 deletions(-) diff --git a/python/api_v2/profile/views.py b/python/api_v2/profile/views.py index 04ac33b540..52156640c3 100644 --- a/python/api_v2/profile/views.py +++ b/python/api_v2/profile/views.py @@ -10,7 +10,10 @@ # See the License for the specific 
language governing permissions and # limitations under the License. -from audit.utils import audit +from audit.alt.api import audit_update +from audit.alt.hooks import ( + extract_for_current_user, +) from cm.errors import AdcmEx from cm.services.adcm import retrieve_password_requirements from core.rbac.operations import update_user_password @@ -24,6 +27,7 @@ from api_v2.api_schema import ErrorSerializer from api_v2.profile.serializers import ProfileSerializer, ProfileUpdateSerializer +from api_v2.utils.audit import profile_of_current_user, retrieve_user_password_groups from api_v2.views import ADCMGenericViewSet @@ -59,7 +63,12 @@ def get_serializer_class(self) -> type[ProfileSerializer | ProfileUpdateSerializ return ProfileSerializer - @audit + @( + audit_update(name="Profile updated", object_=profile_of_current_user).track_changes( + before=(extract_for_current_user(func=retrieve_user_password_groups, section="previous"),), + after=(extract_for_current_user(func=retrieve_user_password_groups, section="current"),), + ) + ) def partial_update(self, request, *_, **__): user = self.get_object() diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index d2b0ba4d29..e0370ac991 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -135,8 +135,36 @@ def get_name(self, id_: str | int) -> str | None: _extract_user_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.USER, create_new=create_audit_user_object ) + + +@dataclass +class ProfileRetriever(GeneralAuditObjectRetriever): + def __call__( + self, + context: "OperationAuditContext", + call_arguments: AuditedCallArguments, # noqa: ARG002 + result: Result | None, # noqa: ARG002 + exception: Exception | None, # noqa: ARG002 + ) -> AuditObject | None: + id_ = str(context.user.id) if context.user else None + if not id_: + return None + + audit_object = AuditObject.objects.filter( + object_id=id_, object_type=self.audit_object_type, 
is_deleted=self.is_deleted + ).first() + if audit_object: + return audit_object + + return self.create_new(id_, self.audit_object_type) + + +_extract_profile_from = partial( + ProfileRetriever, audit_object_type=AuditObjectType.USER, create_new=create_audit_user_object +) user_from_response = _extract_user_from(extract_id=ExtractID(field="id").from_response) user_from_lookup = _extract_user_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) +profile_of_current_user = _extract_profile_from(extract_id=ExtractID(field="pk").from_lookup_kwargs) _extract_group_from = partial( GeneralAuditObjectRetriever, audit_object_type=AuditObjectType.GROUP, create_new=create_audit_group_object diff --git a/python/audit/alt/hooks.py b/python/audit/alt/hooks.py index d23e6774ad..3c6511d806 100644 --- a/python/audit/alt/hooks.py +++ b/python/audit/alt/hooks.py @@ -266,6 +266,27 @@ def extract_from_object( class HookImpl(AuditHook): def __call__(self): id_ = self.call_arguments.get(id_arg) if section == "previous" else self.result.data.get(id_field) + + if id_ is None: + return + + if section not in self.context.meta.changes: + self.context.meta.changes[section] = {} + + self.context.meta.changes[section] |= func(id_=id_) + + return HookImpl + + +def extract_for_current_user(func: HookObjectLookupFunc, section: Literal["current", "previous"]): + """ + Hook for retrieving user in requests to profile endpoints + """ + + class HookImpl(AuditHook): + def __call__(self): + id_ = self.context.user.id if self.context.user else None + if id_ is None: return From 8c9c1c58aec7e97463e5ffaaa7ea926dc9778ae6 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 12 Jul 2024 04:24:59 +0000 Subject: [PATCH 20/98] ADCM-5775 Rework concerns update during config save --- .../bundles/cluster_all_concerns/config.yaml | 9 + .../bundles/provider_concerns/config.yaml | 6 + python/api_v2/tests/test_concerns.py | 164 +++++++++++++++++- python/cm/api.py | 33 ++-- 
python/cm/services/concern/distribution.py | 143 ++++++++++++++- python/cm/services/concern/flags.py | 9 +- 6 files changed, 347 insertions(+), 17 deletions(-) diff --git a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml index 7fb3f692b3..6f0ae263d0 100644 --- a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml +++ b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml @@ -3,6 +3,9 @@ version: 3.4 allow_maintenance_mode: true + flag_autogeneration: + enable_outdated_config: true + # no import for service import: &import for_export: @@ -24,6 +27,8 @@ - type: service name: main version: 1 + flag_autogeneration: + enable_outdated_config: true config: *required_config @@ -33,8 +38,12 @@ single: constraint: [1] config: *required_config + flag_autogeneration: + enable_outdated_config: true free: actions: *actions + flag_autogeneration: + enable_outdated_config: true - type: service name: required diff --git a/python/api_v2/tests/bundles/provider_concerns/config.yaml b/python/api_v2/tests/bundles/provider_concerns/config.yaml index 6db1d64e7f..ef8926dc48 100644 --- a/python/api_v2/tests/bundles/provider_concerns/config.yaml +++ b/python/api_v2/tests/bundles/provider_concerns/config.yaml @@ -2,6 +2,9 @@ name: provider_with_concerns version: 12 + flag_autogeneration: + enable_outdated_config: true + config: &required_config - name: field type: integer @@ -18,5 +21,8 @@ name: hohoho version: 2 + flag_autogeneration: + enable_outdated_config: true + config: *required_config actions: *actions diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 3428fd950d..40f828e151 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -13,6 +13,7 @@ from pathlib import Path from typing import Iterable +from cm.converters import orm_object_to_core_type from cm.models import ( Action, ADCMEntity, @@ -20,6 +21,7 @@ 
ClusterObject, ConcernCause, ConcernItem, + ConcernType, Host, JobLog, ObjectType, @@ -27,9 +29,12 @@ PrototypeImport, ServiceComponent, ) +from cm.services.concern.flags import BuiltInFlag, lower_flag from cm.services.concern.messages import ConcernMessage from cm.tests.mocks.task_runner import RunTaskMock from core.cluster.types import ObjectMaintenanceModeState as MM # noqa: N814 +from core.types import ADCMCoreType, CoreObjectDescriptor +from django.db.models import Q from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -391,9 +396,28 @@ def setUp(self) -> None: def repr_concerns(self, concerns: Iterable[ConcernItem]) -> str: return "\n".join( f" {i}. {rec}" - for i, rec in enumerate(sorted(f"{concern.cause} from {concern.owner}" for concern in concerns), start=1) + for i, rec in enumerate( + sorted(f"{concern.type} | {concern.cause} from {concern.owner}" for concern in concerns), start=1 + ) + ) + + def get_config_issues_of(self, *objects: ADCMEntity) -> tuple[ConcernItem, ...]: + return ConcernItem.objects.filter( + self.prepare_objects_filter(*objects), type=ConcernType.ISSUE, cause=ConcernCause.CONFIG + ) + + def get_config_flags_of(self, *objects: ADCMEntity) -> tuple[ConcernItem, ...]: + return ConcernItem.objects.filter( + self.prepare_objects_filter(*objects), type=ConcernType.FLAG, cause=ConcernCause.CONFIG ) + def prepare_objects_filter(self, *objects: ADCMEntity): + object_filter = Q() + for object_ in objects: + object_filter |= Q(owner_id=object_.id, owner_type=object_.content_type) + + return object_filter + def check_concerns(self, object_: ADCMEntity, concerns: Iterable[ConcernItem]) -> None: expected_concerns = tuple(concerns) object_concerns = tuple(object_.concerns.all()) @@ -435,6 +459,12 @@ def change_mm_via_api(self, mm_value: MM, *objects: ClusterObject | ServiceCompo HTTP_200_OK, ) + def change_config_via_api(self, object_: ADCMEntity) -> None: + self.assertEqual( + 
self.client.v2[object_, "configs"].post(data={"config": {"field": 1}, "adcmMeta": {}}).status_code, + HTTP_201_CREATED, + ) + def test_concerns_swap_on_mapping_changes(self) -> None: # prepare host_1, host_2, unmapped_host = ( @@ -442,6 +472,10 @@ def test_concerns_swap_on_mapping_changes(self) -> None: ) unbound_host = self.add_host(self.provider, fqdn="free-host") self.change_configuration(host_2, config_diff={"field": 4}) + lower_flag( + BuiltInFlag.ADCM_OUTDATED_CONFIG.value.name, + on_objects=[CoreObjectDescriptor(id=host_2.id, type=ADCMCoreType.HOST)], + ) main_s = self.add_services_to_cluster(["main"], cluster=self.cluster).get() single_c = main_s.servicecomponent_set.get(prototype__name="single") @@ -598,6 +632,8 @@ def test_concerns_distribution_mm(self) -> None: for object_ in (host_no_concerns, host_2): self.change_configuration(object_, config_diff={"field": 1}) + object_desc = CoreObjectDescriptor(id=object_.id, type=orm_object_to_core_type(object_)) + lower_flag(BuiltInFlag.ADCM_OUTDATED_CONFIG.value.name, on_objects=[object_desc]) main_s, no_components_s = ( self.add_services_to_cluster(["main", "no_components"], cluster=self.cluster) @@ -814,3 +850,129 @@ def test_concerns_distribution_mm(self) -> None: ) self.check_concerns_of_control_objects() + + def test_concern_removal_with_flag_autogeneration_on_config_change(self) -> None: + # prepare + host_1 = self.add_host(self.provider, fqdn="host-1", cluster=self.cluster) + host_2 = self.add_host(self.provider, fqdn="host-2", cluster=self.cluster) + unmapped_host = self.add_host(self.provider, fqdn="unmapped-host", cluster=self.cluster) + another_provider = self.add_provider(bundle=self.provider.prototype.bundle, name="No Concerns HP") + another_host = self.add_host(provider=another_provider, fqdn="no-concerns-host", cluster=self.cluster) + + main_s = self.add_services_to_cluster(["main"], cluster=self.cluster).get() + no_components_s = self.add_services_to_cluster(["no_components"], 
cluster=self.cluster).get() + single_c = main_s.servicecomponent_set.get(prototype__name="single") + free_c = main_s.servicecomponent_set.get(prototype__name="free") + + self.set_hostcomponent( + cluster=self.cluster, + entries=((host_1, single_c), (host_1, free_c), (host_2, free_c), (another_host, free_c)), + ) + self.change_mm_via_api(MM.ON, host_2, single_c) + + # find own concerns + expected = {} + + no_components_s_own_con = no_components_s.get_own_issue(ConcernCause.IMPORT) + expected["main_s_own_con"] = main_s.get_own_issue(ConcernCause.CONFIG) + expected["cluster_own_cons"] = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + expected["single_c_con"] = single_c.get_own_issue(ConcernCause.CONFIG) + + def check_concerns(): + mapped_hosts_concerns = (*expected["host_1_concerns"], *expected["another_host_concerns"]) + self.check_concerns( + self.cluster, + concerns=( + *expected["cluster_own_cons"], + expected["main_s_own_con"], + no_components_s_own_con, + *mapped_hosts_concerns, + ), + ) + self.check_concerns(no_components_s, concerns=(*expected["cluster_own_cons"], no_components_s_own_con)) + self.check_concerns( + main_s, concerns=(*expected["cluster_own_cons"], expected["main_s_own_con"], *mapped_hosts_concerns) + ) + self.check_concerns( + free_c, concerns=(*expected["cluster_own_cons"], expected["main_s_own_con"], *mapped_hosts_concerns) + ) + self.check_concerns( + single_c, + concerns=( + *expected["cluster_own_cons"], + expected["main_s_own_con"], + expected["single_c_con"], + *expected["host_1_concerns"], + ), + ) + + self.check_concerns( + host_1, + concerns=(*expected["cluster_own_cons"], expected["main_s_own_con"], *expected["host_1_concerns"]), + ) + self.check_concerns( + host_2, + concerns=( + *expected["cluster_own_cons"], + expected["main_s_own_con"], + *self.get_config_issues_of(host_2, self.provider), + ), + ) + self.check_concerns( + another_host, + concerns=( + 
*expected["cluster_own_cons"], + expected["main_s_own_con"], + *expected["another_host_concerns"], + ), + ) + self.check_concerns(unmapped_host, concerns=self.get_config_issues_of(unmapped_host, self.provider)) + self.check_concerns(self.provider, concerns=self.get_config_issues_of(self.provider)) + self.check_concerns(another_provider, concerns=self.get_config_flags_of(another_provider)) + + self.check_concerns_of_control_objects() + + # test + self.change_config_via_api(another_provider) + + expected["host_1_concerns"] = self.get_config_issues_of(host_1, self.provider) + expected["another_host_concerns"] = ( + *self.get_config_issues_of(another_host), + *self.get_config_flags_of(another_provider), + ) + + with self.subTest("Change HostProvider Config"): + check_concerns() + + self.change_config_via_api(host_1) + + expected["host_1_concerns"] = (*self.get_config_issues_of(self.provider), *self.get_config_flags_of(host_1)) + expected["another_host_concerns"] = ( + *self.get_config_issues_of(another_host), + *self.get_config_flags_of(another_provider), + ) + + with self.subTest("Change Host Config"): + check_concerns() + + self.change_config_via_api(single_c) + expected["single_c_con"] = self.get_config_flags_of(single_c)[0] + + with self.subTest("Change Component in MM Config"): + check_concerns() + + self.change_config_via_api(self.cluster) + expected["cluster_own_cons"] = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + + with self.subTest("Change Cluster Config"): + check_concerns() + + self.change_config_via_api(main_s) + expected["main_s_own_con"] = self.get_config_flags_of(main_s)[0] + + with self.subTest("Change Service Config"): + check_concerns() diff --git a/python/cm/api.py b/python/cm/api.py index 8747bab8bb..2a665fd1c1 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -32,7 +32,7 @@ ) from cm.adcm_config.utils import proto_ref from cm.api_context import CTX -from cm.converters import 
orm_object_to_core_type +from cm.converters import orm_object_to_action_target_type, orm_object_to_core_type from cm.errors import AdcmEx, raise_adcm_ex from cm.issue import ( add_concern_to_object, @@ -52,6 +52,7 @@ Cluster, ClusterBind, ClusterObject, + ConcernCause, ConcernItem, ConcernType, ConfigLog, @@ -68,7 +69,9 @@ ServiceComponent, TaskLog, ) -from cm.services.concern.flags import BuiltInFlag, raise_flag, update_hierarchy +from cm.services.concern import delete_issue +from cm.services.concern.distribution import distribute_concern_on_related_objects +from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import ( send_config_creation_event, @@ -398,7 +401,11 @@ def update_obj_config(obj_conf: ObjectConfig, config: dict, attr: dict, descript with atomic(): config_log = save_object_config(object_config=obj_conf, config=new_conf, attr=attr, description=description) - update_hierarchy_issues(obj=obj) + + delete_issue( + owner=CoreObjectDescriptor(id=obj.id, type=orm_object_to_action_target_type(object_=obj)), + cause=ConcernCause.CONFIG, + ) # flag on ADCM can't be raised (only objects of `ADCMCoreType` are supported) if not isinstance(obj, ADCM): raise_outdated_config_flag_if_required(object_=obj) @@ -414,15 +421,22 @@ def raise_outdated_config_flag_if_required(object_: MainObject): return flag = BuiltInFlag.ADCM_OUTDATED_CONFIG.value - flag_exists = object_.concerns.filter(name=flag.name, type=ConcernType.FLAG).exists() + flag_exists = object_.concerns.filter( + name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type + ).exists() # raise unconditionally here, because message should be from "default" flag - raise_flag(flag=flag, on_objects=[CoreObjectDescriptor(id=object_.id, type=orm_object_to_core_type(object_))]) + owner = CoreObjectDescriptor(id=object_.id, type=orm_object_to_core_type(object_)) + raise_flag(flag=flag, 
on_objects=[owner]) if not flag_exists: - update_hierarchy( - concern=ConcernItem.objects.get( - name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type - ) + concern_id = ConcernItem.objects.values_list("id", flat=True).get( + name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type ) + distribute_concern_on_related_objects(owner=owner, concern_id=concern_id) + # update_hierarchy( + # concern=ConcernItem.objects.get( + # name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type + # ) + # ) def set_object_config_with_plugin(obj: ADCMEntity, config: dict, attr: dict) -> ConfigLog: @@ -432,7 +446,6 @@ def set_object_config_with_plugin(obj: ADCMEntity, config: dict, attr: dict) -> config_log = save_object_config( object_config=obj.config, config=new_conf, attr=attr, description="ansible update" ) - update_hierarchy_issues(obj=obj) apply_policy_for_new_config(config_object=obj, config_log=config_log) return config_log diff --git a/python/cm/services/concern/distribution.py b/python/cm/services/concern/distribution.py index 9b9fe8d4d7..bb20d48a24 100644 --- a/python/cm/services/concern/distribution.py +++ b/python/cm/services/concern/distribution.py @@ -13,11 +13,26 @@ from collections import defaultdict, deque from copy import copy from itertools import chain +from operator import itemgetter from typing import Iterable, TypeAlias -from core.cluster.operations import calculate_maintenance_mode_for_cluster_objects +from core.cluster.operations import ( + calculate_maintenance_mode_for_cluster_objects, + calculate_maintenance_mode_for_component, + calculate_maintenance_mode_for_service, +) from core.cluster.types import ClusterTopology, ObjectMaintenanceModeState -from core.types import ADCMCoreType, ClusterID, ComponentID, ConcernID, HostID, HostProviderID, ObjectID, ServiceID +from core.types import ( + ADCMCoreType, + ClusterID, + ComponentID, + ConcernID, + 
CoreObjectDescriptor, + HostID, + HostProviderID, + ObjectID, + ServiceID, +) from django.contrib.contenttypes.models import ContentType from django.db.models import Q @@ -28,12 +43,13 @@ ConcernItem, ConcernType, Host, + HostComponent, HostProvider, ServiceComponent, ) from cm.services.cluster import retrieve_clusters_objects_maintenance_mode -# PUBLIC redistribute_concerns_on_mapping_change +# PUBLIC redistribute_issues_and_flags TopologyObjectMap: TypeAlias = dict[ADCMCoreType, tuple[ObjectID, ...]] @@ -249,6 +265,127 @@ def _relink_concerns_to_objects_in_db( ) +# PUBLIC distribute_concern_on_related_objects + +ConcernRelatedObjects: TypeAlias = dict[ADCMCoreType, set[ObjectID]] + + +def distribute_concern_on_related_objects(owner: CoreObjectDescriptor, concern_id: ConcernID): + distribution_targets = _find_concern_distribution_targets(owner=owner) + _add_concern_links_to_objects_in_db(targets=distribution_targets, concern_id=concern_id) + + +def _find_concern_distribution_targets(owner: CoreObjectDescriptor) -> ConcernRelatedObjects: + """ + Find objects that should be affected by appeared concern on given objects considering HC and MM. 
+ """ + targets: ConcernRelatedObjects = defaultdict(set) + + targets[owner.type].add(owner.id) + + match owner.type: + case ADCMCoreType.CLUSTER: + targets[ADCMCoreType.SERVICE] |= set( + ClusterObject.objects.values_list("id", flat=True).filter(cluster_id=owner.id) + ) + targets[ADCMCoreType.COMPONENT] |= set( + ServiceComponent.objects.values_list("id", flat=True).filter(cluster_id=owner.id) + ) + targets[ADCMCoreType.HOST] |= set( + HostComponent.objects.values_list("host_id", flat=True).filter(cluster_id=owner.id) + ) + + case ADCMCoreType.SERVICE: + hosts_info = HostComponent.objects.values_list("host_id", "host__maintenance_mode").filter( + service_id=owner.id + ) + components_info = ServiceComponent.objects.values_list("id", "_maintenance_mode").filter( + service_id=owner.id + ) + + raw_own_mm, cluster_id = ClusterObject.objects.values_list("_maintenance_mode", "cluster_id").get( + id=owner.id + ) + + own_mm = calculate_maintenance_mode_for_service( + own_mm=ObjectMaintenanceModeState(raw_own_mm), + service_components_own_mm=( + ObjectMaintenanceModeState(component_mm) for _, component_mm in components_info + ), + service_hosts_mm=(ObjectMaintenanceModeState(host_mm) for _, host_mm in hosts_info), + ) + + if own_mm == ObjectMaintenanceModeState.OFF: + targets[ADCMCoreType.CLUSTER].add(cluster_id) + targets[ADCMCoreType.COMPONENT].update(map(itemgetter(0), components_info)) + targets[ADCMCoreType.HOST].update(map(itemgetter(0), hosts_info)) + + case ADCMCoreType.COMPONENT: + raw_own_mm, cluster_id, service_id, service_raw_own_mm = ServiceComponent.objects.values_list( + "_maintenance_mode", "cluster_id", "service_id", "service___maintenance_mode" + ).get(id=owner.id) + + hosts_info = HostComponent.objects.values_list("host_id", "host__maintenance_mode").filter( + component_id=owner.id + ) + + own_mm = calculate_maintenance_mode_for_component( + own_mm=ObjectMaintenanceModeState(raw_own_mm), + service_mm=ObjectMaintenanceModeState(service_raw_own_mm), + 
component_hosts_mm=(ObjectMaintenanceModeState(host_mm) for _, host_mm in hosts_info), + ) + + if own_mm == ObjectMaintenanceModeState.OFF: + targets[ADCMCoreType.CLUSTER].add(cluster_id) + targets[ADCMCoreType.SERVICE].add(service_id) + targets[ADCMCoreType.HOST].update(map(itemgetter(0), hosts_info)) + + case ADCMCoreType.HOST: + own_mm = ObjectMaintenanceModeState( + Host.objects.values_list("maintenance_mode", flat=True).get(id=owner.id) + ) + + if own_mm == ObjectMaintenanceModeState.OFF: + hc_records = tuple( + HostComponent.objects.values("cluster_id", "service_id", "component_id").filter(host_id=owner.id) + ) + if hc_records: + targets[ADCMCoreType.CLUSTER].add(hc_records[0]["cluster_id"]) + targets[ADCMCoreType.SERVICE].update(map(itemgetter("service_id"), hc_records)) + targets[ADCMCoreType.COMPONENT].update(map(itemgetter("component_id"), hc_records)) + + case ADCMCoreType.HOSTPROVIDER: + targets[ADCMCoreType.HOST] |= set(Host.objects.values_list("id", flat=True).filter(provider_id=owner.id)) + + hc_records = tuple( + HostComponent.objects.values("cluster_id", "service_id", "component_id").filter( + host_id__in=targets.get(ADCMCoreType.HOST, ()) + ) + ) + if hc_records: + targets[ADCMCoreType.CLUSTER].add(hc_records[0]["cluster_id"]) + targets[ADCMCoreType.SERVICE].update(map(itemgetter("service_id"), hc_records)) + targets[ADCMCoreType.COMPONENT].update(map(itemgetter("component_id"), hc_records)) + + case _: + message = f"Direct concerns distribution isn't implemented for {owner.type}" + raise NotImplementedError(message) + + return targets + + +def _add_concern_links_to_objects_in_db(targets: ConcernRelatedObjects, concern_id: ConcernID) -> None: + for core_type, ids in targets.items(): + orm_model = core_type_to_model(core_type) + id_field = f"{orm_model.__name__.lower()}_id" + m2m_model = orm_model.concerns.through + + m2m_model.objects.bulk_create( + objs=(m2m_model(concernitem_id=concern_id, **{id_field: object_id}) for object_id in ids), + 
ignore_conflicts=True, + ) + + # PROTECTED generic-purpose methods diff --git a/python/cm/services/concern/flags.py b/python/cm/services/concern/flags.py index e41246443f..77b1240dfa 100644 --- a/python/cm/services/concern/flags.py +++ b/python/cm/services/concern/flags.py @@ -22,10 +22,11 @@ from django.contrib.contenttypes.models import ContentType from django.db.models import Q -from cm.converters import core_type_to_model +from cm.converters import core_type_to_model, model_name_to_core_type from cm.hierarchy import Tree from cm.issue import add_concern_to_object, remove_concern_from_object from cm.models import ADCMEntity, ConcernCause, ConcernItem, ConcernType +from cm.services.concern.distribution import distribute_concern_on_related_objects from cm.services.concern.messages import ( ADCM_ENTITY_AS_PLACEHOLDERS, ConcernMessage, @@ -120,13 +121,15 @@ def lower_all_flags(on_objects: Collection[CoreObjectDescriptor]) -> bool: def update_hierarchy_for_flag(flag: ConcernFlag, on_objects: Collection[CoreObjectDescriptor]) -> None: - for concern in ConcernItem.objects.filter( + for concern in ConcernItem.objects.select_related("owner_type").filter( Q(name=flag.name, cause=flag.cause, type=ConcernType.FLAG) & _get_filter_for_flags_of_objects( content_type_id_map=_get_owner_ids_grouped_by_content_type(objects=on_objects) ) ): - update_hierarchy(concern) + owner = CoreObjectDescriptor(id=concern.owner_id, type=model_name_to_core_type(concern.owner_type.model)) + distribute_concern_on_related_objects(owner=owner, concern_id=concern.id) + # update_hierarchy(concern) def update_hierarchy(concern: ConcernItem) -> None: From 55ed1f4ca6b8681589fe2b522d353ec9adbf475a Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 15 Jul 2024 07:00:42 +0000 Subject: [PATCH 21/98] ADCM-5782 Change concerns distribution approach for imports --- .../bundles/cluster_all_concerns/config.yaml | 21 +++++- python/api_v2/tests/test_concerns.py | 75 ++++++++++++++++++- python/cm/api.py | 9 
++- python/cm/issue.py | 30 ++++---- python/cm/tests/test_issue.py | 14 ++-- 5 files changed, 119 insertions(+), 30 deletions(-) diff --git a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml index 6f0ae263d0..bf1548e435 100644 --- a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml +++ b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml @@ -8,7 +8,7 @@ # no import for service import: &import - for_export: + cluster_export: multibind: false required: true @@ -76,3 +76,22 @@ version: 5 import: *import + +- type: service + name: with_multiple_imports + version: 2 + + + import: + cluster_export: + multibind: false + required: true + + service_export: + multibind: true + required: true + + components: + component_1: + config: *required_config + component_2: diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 40f828e151..78ad406341 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -366,14 +366,14 @@ class TestConcernRedistribution(BaseAPITestCase): def setUp(self) -> None: super().setUp() - bundles_dir = Path(__file__).parent / "bundles" + self.bundles_dir = Path(__file__).parent / "bundles" self.cluster = self.add_cluster( - bundle=self.add_bundle(bundles_dir / "cluster_all_concerns"), name="With Concerns" + bundle=self.add_bundle(self.bundles_dir / "cluster_all_concerns"), name="With Concerns" ) self.provider = self.add_provider( - bundle=self.add_bundle(bundles_dir / "provider_concerns"), name="Concerned HP" + bundle=self.add_bundle(self.bundles_dir / "provider_concerns"), name="Concerned HP" ) self.control_cluster = self.add_cluster(bundle=self.cluster.prototype.bundle, name="Control Cluster") @@ -435,7 +435,10 @@ def check_concerns(self, object_: ADCMEntity, concerns: Iterable[ConcernItem]) - self.assertEqual(actual_amount, expected_amount, message) for concern in expected_concerns: 
-            self.assertIn(concern, object_concerns)
+            if concern not in object_concerns:
+                cur_concern = f"{concern.type} | {concern.cause} from {concern.owner}"
+                message = f"{cur_concern} not found in:\n{self.repr_concerns(concerns)}"
+                self.assertIn(concern, object_concerns, message)
 
     def check_concerns_of_control_objects(self) -> None:
         for object_, expected_concerns in self.control_concerns.items():
@@ -465,6 +468,12 @@ def change_config_via_api(self, object_: ADCMEntity) -> None:
             HTTP_201_CREATED,
         )
 
+    def change_imports_via_api(self, target: Cluster | ClusterObject, imports: list[dict]) -> None:
+        self.assertEqual(
+            self.client.v2[target, "imports"].post(data=imports).status_code,
+            HTTP_201_CREATED,
+        )
+
     def test_concerns_swap_on_mapping_changes(self) -> None:
         # prepare
         host_1, host_2, unmapped_host = (
@@ -976,3 +985,61 @@ def check_concerns():
 
         with self.subTest("Change Service Config"):
             check_concerns()
+
+    def test_concerns_changes_on_import(self) -> None:
+        # prepare
+        host_1 = self.add_host(self.provider, fqdn="host-1", cluster=self.cluster)
+        host_2 = self.add_host(self.provider, fqdn="host-2", cluster=self.cluster)
+
+        import_s = self.add_services_to_cluster(["with_multiple_imports"], cluster=self.cluster).get()
+        component_1, component_2 = import_s.servicecomponent_set.order_by("prototype__name")
+
+        self.set_hostcomponent(
+            cluster=self.cluster,
+            entries=((host_1, component_2),),
+        )
+
+        export_cluster = self.add_cluster(self.add_bundle(self.bundles_dir / "cluster_export"), "Exporter")
+        export_service = self.add_services_to_cluster(["service_export"], cluster=export_cluster).get()
+
+        # find own concerns
+        provider_config_con = self.provider.get_own_issue(ConcernCause.CONFIG)
+        host_1_cons = (provider_config_con, host_1.get_own_issue(ConcernCause.CONFIG))
+        host_2_cons = (provider_config_con, host_2.get_own_issue(ConcernCause.CONFIG))
+        cluster_own_cons = tuple(
+            ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type)
+ ) + import_s_con = import_s.get_own_issue(ConcernCause.IMPORT) + component_1_con = component_1.get_own_issue(ConcernCause.CONFIG) + + # test + + self.change_imports_via_api( + import_s, + imports=[ + {"source": {"type": "service", "id": export_service.id}}, + {"source": {"type": "cluster", "id": export_cluster.id}}, + ], + ) + + with self.subTest("Set All Imports On Service"): + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, component_1_con, *host_1_cons)) + self.check_concerns(import_s, concerns=(*cluster_own_cons, component_1_con, *host_1_cons)) + self.check_concerns(component_1, concerns=(*cluster_own_cons, component_1_con)) + self.check_concerns(component_2, concerns=(*cluster_own_cons, *host_1_cons)) + self.check_concerns(host_1, concerns=(*host_1_cons, *cluster_own_cons)) + self.check_concerns(host_2, concerns=host_2_cons) + + self.check_concerns_of_control_objects() + + self.change_imports_via_api(import_s, imports=[{"source": {"type": "service", "id": export_service.id}}]) + + with self.subTest("Set 1/2 Required Imports On Service"): + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, import_s_con, component_1_con, *host_1_cons)) + self.check_concerns(import_s, concerns=(*cluster_own_cons, import_s_con, component_1_con, *host_1_cons)) + self.check_concerns(component_1, concerns=(*cluster_own_cons, import_s_con, component_1_con)) + self.check_concerns(component_2, concerns=(*cluster_own_cons, import_s_con, *host_1_cons)) + self.check_concerns(host_1, concerns=(*host_1_cons, import_s_con, *cluster_own_cons)) + self.check_concerns(host_2, concerns=host_2_cons) + + self.check_concerns_of_control_objects() diff --git a/python/cm/api.py b/python/cm/api.py index 2a665fd1c1..6e54926baf 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -39,7 +39,9 @@ check_bound_components, check_component_constraint, check_hc_requires, + check_required_import, check_service_requires, + create_issue, remove_concern_from_object, 
update_hierarchy_issues, update_issue_after_deleting, @@ -852,7 +854,12 @@ def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: list[ cluster_bind.save() logger.info("bind %s to %s", obj_ref(obj=export_obj), obj_ref(obj=import_obj)) - update_hierarchy_issues(obj=cluster) + import_target = CoreObjectDescriptor(id=import_obj.id, type=orm_object_to_core_type(import_obj)) + if check_required_import(obj=import_obj): + delete_issue(owner=import_target, cause=ConcernCause.IMPORT) + elif not import_obj.get_own_issue(ConcernCause.IMPORT): + concern = create_issue(obj=import_obj, issue_cause=ConcernCause.IMPORT) + distribute_concern_on_related_objects(owner=import_target, concern_id=concern.id) return get_import(cluster=cluster, service=service) diff --git a/python/cm/issue.py b/python/cm/issue.py index 65e516fffc..8707c9fe93 100755 --- a/python/cm/issue.py +++ b/python/cm/issue.py @@ -97,8 +97,7 @@ def check_required_import(obj: [Cluster, ClusterObject]) -> bool: else: raise AdcmEx(code="ISSUE_INTEGRITY_ERROR", msg=f"Could not check import for {obj}") - res, _ = do_check_import(cluster=cluster, service=service) - return res + return do_check_import(cluster=cluster, service=service) def check_service_requires(cluster: Cluster, proto: Prototype) -> None: @@ -135,8 +134,7 @@ def check_requires(service: ClusterObject) -> bool: return True -def do_check_import(cluster: Cluster, service: ClusterObject | None = None) -> tuple[bool, str | None]: - import_exist = (True, None) +def do_check_import(cluster: Cluster, service: ClusterObject | None = None) -> bool: proto = cluster.prototype if service: @@ -144,24 +142,22 @@ def do_check_import(cluster: Cluster, service: ClusterObject | None = None) -> t prototype_imports = PrototypeImport.objects.filter(prototype=proto) if not prototype_imports.exists(): - return import_exist + return True if not any(prototype_imports.values_list("required", flat=True)): - return True, "NOT_REQUIRED" + return True - for 
prototype_import in prototype_imports.filter(required=True): - import_exist = (False, None) - for cluster_bind in ClusterBind.objects.filter(cluster=cluster): - if cluster_bind.source_cluster and cluster_bind.source_cluster.prototype.name == prototype_import.name: - import_exist = (True, "CLUSTER_IMPORTED") + required_import_names = set(prototype_imports.values_list("name", flat=True).filter(required=True)) - if cluster_bind.source_service and cluster_bind.source_service.prototype.name == prototype_import.name: - import_exist = (True, "SERVICE_IMPORTED") + for cluster_name, service_name in ClusterBind.objects.values_list( + "source_cluster__prototype__name", "source_service__prototype__name" + ).filter(cluster=cluster, service=service): + if service_name: + required_import_names -= {service_name} + elif cluster_name: + required_import_names -= {cluster_name} - if not import_exist[0]: - break - - return import_exist + return required_import_names == set() def check_hc(cluster: Cluster) -> bool: diff --git a/python/cm/tests/test_issue.py b/python/cm/tests/test_issue.py index e5efb59c6f..cb0d2c621c 100644 --- a/python/cm/tests/test_issue.py +++ b/python/cm/tests/test_issue.py @@ -200,19 +200,19 @@ def cook_cluster(proto_name, cluster_name): def test_no_import(self): _, _, cluster = self.cook_cluster("Hadoop", "Cluster1") - self.assertEqual(do_check_import(cluster), (True, None)) + self.assertTrue(do_check_import(cluster)) def test_import_required(self): _, proto1, cluster = self.cook_cluster("Hadoop", "Cluster1") PrototypeImport.objects.create(prototype=proto1, name="Monitoring", required=True) - self.assertEqual(do_check_import(cluster), (False, None)) + self.assertFalse(do_check_import(cluster)) def test_import_not_required(self): _, proto1, cluster = self.cook_cluster("Hadoop", "Cluster1") PrototypeImport.objects.create(prototype=proto1, name="Monitoring", required=False) - self.assertEqual(do_check_import(cluster), (True, "NOT_REQUIRED")) + 
self.assertTrue(do_check_import(cluster)) def test_cluster_imported(self): _, proto1, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -221,7 +221,7 @@ def test_cluster_imported(self): _, _, cluster2 = self.cook_cluster("Monitoring", "Cluster2") ClusterBind.objects.create(cluster=cluster1, source_cluster=cluster2) - self.assertEqual(do_check_import(cluster1), (True, "CLUSTER_IMPORTED")) + self.assertTrue(do_check_import(cluster1)) def test_service_imported(self): _, proto1, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -232,7 +232,7 @@ def test_service_imported(self): service = add_service_to_cluster(cluster2, proto3) ClusterBind.objects.create(cluster=cluster1, source_cluster=cluster2, source_service=service) - self.assertEqual(do_check_import(cluster1), (True, "SERVICE_IMPORTED")) + self.assertTrue(do_check_import(cluster1)) def test_import_to_service(self): bundle_1, _, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -243,7 +243,7 @@ def test_import_to_service(self): _, _, cluster2 = self.cook_cluster("Monitoring", "Cluster2") ClusterBind.objects.create(cluster=cluster1, service=service, source_cluster=cluster2) - self.assertEqual(do_check_import(cluster1, service), (True, "CLUSTER_IMPORTED")) + self.assertTrue(do_check_import(cluster1, service)) def test_import_service_to_service(self): bundle_1, _, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -261,7 +261,7 @@ def test_import_service_to_service(self): source_service=service2, ) - self.assertEqual(do_check_import(cluster1, service1), (True, "SERVICE_IMPORTED")) + self.assertTrue(do_check_import(cluster1, service1)) def test_issue_cluster_required_import(self): _, proto1, cluster1 = self.cook_cluster("Hadoop", "Cluster1") From 32e2f71d7d67f4d4ba4959883501bf10158c9521 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Mon, 15 Jul 2024 09:35:28 +0000 Subject: [PATCH 22/98] ADCM-5790: fix audit of service/component mm views --- python/api_v2/component/views.py | 18 +++++++++++----- 
python/api_v2/service/views.py | 11 +++++++--- python/api_v2/utils/audit.py | 35 +++++++++++++++++++++++++++++--- 3 files changed, 53 insertions(+), 11 deletions(-) diff --git a/python/api_v2/component/views.py b/python/api_v2/component/views.py index d1dfe187da..efbaaeb5c3 100644 --- a/python/api_v2/component/views.py +++ b/python/api_v2/component/views.py @@ -22,7 +22,7 @@ get_object_for_user, ) from audit.alt.api import audit_update -from audit.alt.hooks import extract_current_from_response, extract_previous_from_object +from audit.alt.hooks import adjust_denied_on_404_result, extract_current_from_response, extract_previous_from_object from cm.errors import AdcmEx from cm.models import Cluster, ClusterObject, Host, ServiceComponent from cm.services.maintenance_mode import get_maintenance_mode_response @@ -75,7 +75,11 @@ audit_host_group_config_viewset, ) from api_v2.generic.group_config.views import GroupConfigViewSet, HostGroupConfigViewSet -from api_v2.utils.audit import component_from_lookup, parent_component_from_lookup +from api_v2.utils.audit import ( + component_from_lookup, + component_with_parents_specified_in_path_exists, + parent_component_from_lookup, +) from api_v2.views import ( ADCMGenericViewSet, ADCMReadOnlyModelViewSet, @@ -176,9 +180,13 @@ def get_serializer_class(self): return ComponentSerializer - @audit_update(name="Component updated", object_=component_from_lookup).track_changes( - before=extract_previous_from_object(model=ServiceComponent, maintenance_mode=F("_maintenance_mode")), - after=extract_current_from_response("maintenance_mode"), + @( + audit_update(name="Component updated", object_=component_from_lookup) + .attach_hooks(on_collect=adjust_denied_on_404_result(component_with_parents_specified_in_path_exists)) + .track_changes( + before=extract_previous_from_object(model=ServiceComponent, maintenance_mode=F("_maintenance_mode")), + after=extract_current_from_response("maintenance_mode"), + ) ) @update_mm_objects 
@action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) diff --git a/python/api_v2/service/views.py b/python/api_v2/service/views.py index 685afb413a..015e985b9b 100644 --- a/python/api_v2/service/views.py +++ b/python/api_v2/service/views.py @@ -98,6 +98,7 @@ parent_service_from_lookup, service_does_exist, service_from_lookup, + service_with_parents_specified_in_path_exists, set_service_name_from_object, set_service_names_from_request, ) @@ -244,9 +245,13 @@ def destroy(self, request: Request, *args, **kwargs): # noqa: ARG002 instance = self.get_object() return delete_service_from_api(service=instance) - @audit_update(name="Service updated", object_=service_from_lookup).track_changes( - before=extract_previous_from_object(model=ClusterObject, maintenance_mode=F("_maintenance_mode")), - after=extract_current_from_response("maintenance_mode"), + @( + audit_update(name="Service updated", object_=service_from_lookup) + .attach_hooks(on_collect=adjust_denied_on_404_result(service_with_parents_specified_in_path_exists)) + .track_changes( + before=extract_previous_from_object(model=ClusterObject, maintenance_mode=F("_maintenance_mode")), + after=extract_current_from_response("maintenance_mode"), + ) ) @update_mm_objects @action(methods=["post"], detail=True, url_path="maintenance-mode", permission_classes=[ChangeMMPermissions]) diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index e0370ac991..c036c43286 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -13,7 +13,7 @@ from contextlib import suppress from dataclasses import dataclass from functools import partial -from typing import Any +from typing import Any, TypeAlias import json from audit.alt.core import AuditedCallArguments, IDBasedAuditObjectCreator, OperationAuditContext, Result @@ -31,6 +31,10 @@ # object retrievers +ObjectField: TypeAlias = str +CallArgument: TypeAlias = str + + @dataclass(slots=True) 
class ExtractID: field: str @@ -326,13 +330,27 @@ def _retrieve_request_body(request: WSGIRequest) -> Any | None: return body -def object_does_exist(hook: AuditHook, model: type[Model], id_field: str = "pk") -> bool: +def object_does_exist( + hook: AuditHook, + model: type[Model], + id_field: str = "pk", + arg_model_field_map: dict[CallArgument, ObjectField] | None = None, +) -> bool: id_ = hook.call_arguments.get(id_field) if not id_: # it's quite a stretch, but I don't see an alternative way for a safe implementation here return False - return model.objects.filter(id=id_).exists() + lookup_kwargs = ( + { + object_field: hook.call_arguments.get(call_argument) + for call_argument, object_field in arg_model_field_map.items() + } + if arg_model_field_map + else {} + ) + + return model.objects.filter(id=id_, **lookup_kwargs).exists() def nested_host_does_exist(hook: AuditHook) -> bool: @@ -343,6 +361,17 @@ def service_does_exist(hook: AuditHook) -> bool: return object_does_exist(hook=hook, model=ClusterObject) +service_with_parents_specified_in_path_exists = partial( + object_does_exist, model=ClusterObject, arg_model_field_map={"cluster_pk": "cluster_id"} +) + +component_with_parents_specified_in_path_exists = partial( + object_does_exist, + model=ServiceComponent, + arg_model_field_map={"cluster_pk": "cluster_id", "service_pk": "service_id"}, +) + + def retrieve_user_password_groups(id_: int) -> dict: if (user := User.objects.filter(pk=id_).first()) is None: return {} From 44be4c33cc9d12d5c37494d8e80f5c5a7743bdaa Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Mon, 15 Jul 2024 09:36:10 +0000 Subject: [PATCH 23/98] ADCM-5763: Rework calculate concerns for adding service --- python/api_v2/service/utils.py | 16 ++++- .../bundles/service_add_concerns/config.yaml | 25 +++++++ python/api_v2/tests/test_concerns.py | 71 +++++++++++++++++++ python/cm/services/concern/cases.py | 67 +++++++++++++++++ 4 files changed, 176 insertions(+), 3 deletions(-) create mode 100644 
python/api_v2/tests/bundles/service_add_concerns/config.yaml create mode 100644 python/cm/services/concern/cases.py diff --git a/python/api_v2/service/utils.py b/python/api_v2/service/utils.py index 976f70a1db..ff1ba6d4f2 100644 --- a/python/api_v2/service/utils.py +++ b/python/api_v2/service/utils.py @@ -14,7 +14,6 @@ from cm.adcm_config.config import get_prototype_config, process_file_type from cm.errors import AdcmEx -from cm.issue import update_hierarchy_issues from cm.models import ( ADCMEntity, Cluster, @@ -25,6 +24,9 @@ Prototype, ServiceComponent, ) +from cm.services.cluster import retrieve_clusters_topology +from cm.services.concern.cases import recalculate_own_concerns_on_add_services +from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_hc_map from django.db import connection, transaction from django.db.models import QuerySet @@ -37,7 +39,7 @@ def bulk_add_services_to_cluster(cluster: Cluster, prototypes: QuerySet[Prototyp services = ClusterObject.objects.filter(cluster=cluster, prototype__in=prototypes).select_related("prototype") bulk_init_config(objects=services) - service_proto_service_map = {serv.prototype.pk: serv for serv in services} + service_proto_service_map = {service.prototype.pk: service for service in services} ServiceComponent.objects.bulk_create( objs=[ ServiceComponent( @@ -51,7 +53,15 @@ def bulk_add_services_to_cluster(cluster: Cluster, prototypes: QuerySet[Prototyp components = ServiceComponent.objects.filter(cluster=cluster, service__in=services).select_related("prototype") bulk_init_config(objects=components) - update_hierarchy_issues(obj=cluster) + new_concerns = recalculate_own_concerns_on_add_services( + cluster=cluster, + services=services.prefetch_related( + "servicecomponent_set" + ).all(), # refresh values from db to update `config` field + ) + if new_concerns: # TODO: redistribute only new issues. 
See ADCM-5798 + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) + re_apply_object_policy(apply_object=cluster) reset_hc_map() diff --git a/python/api_v2/tests/bundles/service_add_concerns/config.yaml b/python/api_v2/tests/bundles/service_add_concerns/config.yaml new file mode 100644 index 0000000000..4998926111 --- /dev/null +++ b/python/api_v2/tests/bundles/service_add_concerns/config.yaml @@ -0,0 +1,25 @@ +- type: cluster + name: cluster_with_many_concerns_on_add_services + version: &version '1.0' + edition: community + +- name: service_requires_service_with_many_issues_on_add + type: service + version: *version + requires: + - service: service_with_many_issues_on_add + +- name: service_with_many_issues_on_add + type: service + version: *version + required: true + components: + component_plus_constraint: + constraint: [ + ] + config: + - name: boolean + type: boolean + required: true + import: + some_cluster: + required: true diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 40f828e151..452f9976e0 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -34,6 +34,7 @@ from cm.tests.mocks.task_runner import RunTaskMock from core.cluster.types import ObjectMaintenanceModeState as MM # noqa: N814 from core.types import ADCMCoreType, CoreObjectDescriptor +from django.contrib.contenttypes.models import ContentType from django.db.models import Q from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -229,9 +230,26 @@ def setUp(self) -> None: bundle_dir = self.test_bundles_dir / "hc_mapping_constraints" self.hc_mapping_constraints_bundle = self.add_bundle(source_dir=bundle_dir) + bundle_dir = self.test_bundles_dir / "service_add_concerns" + self.service_add_concerns_bundle = self.add_bundle(source_dir=bundle_dir) + bundle_dir = self.test_bundles_dir / "provider_no_config" 
self.provider_no_config_bundle = self.add_bundle(source_dir=bundle_dir) + def _check_concerns(self, object_: Cluster | ClusterObject | ServiceComponent, expected_concerns: list[dict]): + object_concerns = object_.concerns.all() + self.assertEqual(object_concerns.count(), len(expected_concerns)) + + for expected_concern in expected_concerns: + target_concern = object_concerns.filter( + owner_id=expected_concern["owner_id"], + owner_type=expected_concern["owner_type"], + cause=expected_concern["cause"], + name=expected_concern["name"], + type="issue", + ) + self.assertEqual(target_concern.count(), 1) + def test_import_concern_resolved_after_saving_import(self): import_cluster = self.add_cluster(bundle=self.required_import_bundle, name="required_import_cluster") export_cluster = self.cluster_1 @@ -361,6 +379,59 @@ def test_adcm_5677_hc_issue_on_link_host_to_cluster_with_plus_constraint(self): response: Response = self.client.v2[host_2].get() self.assertEqual(len(response.json()["concerns"]), 0) + def test_concerns_on_add_services(self): + cluster = self.add_cluster(bundle=self.service_add_concerns_bundle, name="service_add_concerns_cluster") + required_service_concern = { + "owner_id": cluster.pk, + "owner_type": ContentType.objects.get_for_model(cluster), + "cause": "service", + "name": "service_issue", + } + self._check_concerns(object_=cluster, expected_concerns=[required_service_concern]) + + service_1 = self.add_services_to_cluster( + service_names=["service_requires_service_with_many_issues_on_add"], cluster=cluster + ).get() + unsatisfied_requirements_concern = { + "owner_id": service_1.pk, + "owner_type": ContentType.objects.get_for_model(service_1), + "cause": "requirement", + "name": "requirement_issue", + } + self._check_concerns( + object_=cluster, expected_concerns=[required_service_concern, unsatisfied_requirements_concern] + ) + self._check_concerns( + object_=service_1, expected_concerns=[required_service_concern, unsatisfied_requirements_concern] + ) 
+ + service_2 = self.add_services_to_cluster( + service_names=["service_with_many_issues_on_add"], cluster=cluster + ).get() + component = service_2.servicecomponent_set.get() + hc_concern = { + "owner_id": cluster.pk, + "owner_type": ContentType.objects.get_for_model(cluster), + "cause": "host-component", + "name": "host-component_issue", + } + config_concern = { + "owner_id": service_2.pk, + "owner_type": ContentType.objects.get_for_model(service_2), + "cause": "config", + "name": "config_issue", + } + import_concern = { + "owner_id": service_2.pk, + "owner_type": ContentType.objects.get_for_model(service_2), + "cause": "import", + "name": "import_issue", + } + self._check_concerns(object_=service_2, expected_concerns=[hc_concern, config_concern, import_concern]) + self._check_concerns(object_=component, expected_concerns=[hc_concern, config_concern, import_concern]) + self._check_concerns(object_=service_1, expected_concerns=[hc_concern]) + self._check_concerns(object_=cluster, expected_concerns=[hc_concern, config_concern, import_concern]) + class TestConcernRedistribution(BaseAPITestCase): def setUp(self) -> None: diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py new file mode 100644 index 0000000000..653146ea4d --- /dev/null +++ b/python/cm/services/concern/cases.py @@ -0,0 +1,67 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import defaultdict + +from core.types import ADCMCoreType, CoreObjectDescriptor +from django.db.models import QuerySet + +from cm.issue import ( + check_config, + check_hc, + check_required_import, + check_required_services, + check_requires, + create_issue, +) +from cm.models import Cluster, ClusterObject, ConcernCause +from cm.services.concern import delete_issue +from cm.services.concern.distribution import OwnObjectConcernMap + + +def recalculate_own_concerns_on_add_services( + cluster: Cluster, services: QuerySet[ClusterObject] +) -> OwnObjectConcernMap: + new_concerns: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) + + # create new concerns + if not check_hc(cluster=cluster) and cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT) is None: + issue = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) + new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) + + service_checks = ( + (ConcernCause.CONFIG, check_config), + (ConcernCause.IMPORT, check_required_import), + (ConcernCause.REQUIREMENT, check_requires), + ) + for service in services: + for concern_cause, func in service_checks: + if not func(service): + issue = create_issue(obj=service, issue_cause=concern_cause) + new_concerns[ADCMCoreType.SERVICE][service.pk].add(issue.pk) + + for component in service.servicecomponent_set.all(): + if not check_config(component): + issue = create_issue(obj=component, issue_cause=ConcernCause.CONFIG) + new_concerns[ADCMCoreType.COMPONENT][component.pk].add(issue.pk) + + # remove gone concerns + if check_required_services(cluster=cluster): + delete_issue(owner=CoreObjectDescriptor(type=ADCMCoreType.CLUSTER, id=cluster.pk), cause=ConcernCause.SERVICE) + + for service in cluster.clusterobject_set.exclude(pk__in=(service.pk for service in services)): + if check_requires(service=service): + delete_issue( + owner=CoreObjectDescriptor(type=ADCMCoreType.SERVICE, id=service.pk), cause=ConcernCause.REQUIREMENT + ) + + return 
new_concerns From cd40b55adeef5f28bc2f70a4e5235373d305520b Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Mon, 15 Jul 2024 12:20:11 +0000 Subject: [PATCH 24/98] ADCM-5776: Rework creation concerns for add cluster --- python/cm/api.py | 8 ++++++-- python/cm/services/concern/cases.py | 18 ++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/python/cm/api.py b/python/cm/api.py index 6e54926baf..5e620395c2 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -71,8 +71,10 @@ ServiceComponent, TaskLog, ) +from cm.services.cluster import retrieve_clusters_topology from cm.services.concern import delete_issue -from cm.services.concern.distribution import distribute_concern_on_related_objects +from cm.services.concern.cases import recalculate_own_concerns_on_add_clusters +from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import ( @@ -134,7 +136,9 @@ def add_cluster(prototype: Prototype, name: str, description: str = "") -> Clust object_type=ContentType.objects.get_for_model(Cluster), ) - update_hierarchy_issues(cluster) + # update_hierarchy_issues(cluster) + if recalculate_own_concerns_on_add_clusters(cluster): # TODO: redistribute only new issues. 
See ADCM-5798 + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) reset_hc_map() diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index 653146ea4d..3c26b3eead 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -28,6 +28,24 @@ from cm.services.concern.distribution import OwnObjectConcernMap +def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConcernMap: + new_concerns: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) + + cluster_checks = ( + (ConcernCause.CONFIG, check_config), + (ConcernCause.IMPORT, check_required_import), + (ConcernCause.HOSTCOMPONENT, check_hc), + (ConcernCause.SERVICE, check_required_services), + ) + + for cause, check in cluster_checks: + if not check(cluster): + issue = create_issue(obj=cluster, issue_cause=cause) + new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) + + return new_concerns + + def recalculate_own_concerns_on_add_services( cluster: Cluster, services: QuerySet[ClusterObject] ) -> OwnObjectConcernMap: From c7fbb96451b57d3082d7f480e31fbea015ed1a78 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 15 Jul 2024 13:05:38 +0000 Subject: [PATCH 25/98] ADCM-5786 Change concerns distribution approach in adcm_hc plugin --- python/api/tests/test_api.py | 4 +--- python/api_v2/cluster/utils.py | 5 ----- python/cm/api.py | 16 +++++++++++----- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/python/api/tests/test_api.py b/python/api/tests/test_api.py index 78edfcee0f..fd6d413dfb 100755 --- a/python/api/tests/test_api.py +++ b/python/api/tests/test_api.py @@ -871,8 +871,7 @@ def setUp(self): ) @patch("cm.api.reset_hc_map") - @patch("cm.api.update_hierarchy_issues") - def test_save_hc(self, mock_update_issues, mock_reset_hc_map): + def test_save_hc(self, mock_reset_hc_map): cluster_object = ClusterObject.objects.create(prototype=self.prototype, 
cluster=self.cluster) host = Host.objects.create(prototype=self.prototype, cluster=self.cluster) component = Prototype.objects.create( @@ -899,7 +898,6 @@ def test_save_hc(self, mock_update_issues, mock_reset_hc_map): self.assertListEqual(hc_list, [HostComponent.objects.first()]) - mock_update_issues.assert_called() mock_reset_hc_map.assert_called_once() @patch("cm.api.CTX") diff --git a/python/api_v2/cluster/utils.py b/python/api_v2/cluster/utils.py index da6f0fce55..b300fadcfc 100644 --- a/python/api_v2/cluster/utils.py +++ b/python/api_v2/cluster/utils.py @@ -289,11 +289,6 @@ def _save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: ) redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((mapping_data.cluster.id,)))) - # update_hierarchy_issues(obj=mapping_data.orm_objects["cluster"]) - # for provider_id in {host.provider_id for host in mapping_data.hosts.values()}: - # update_hierarchy_issues(obj=mapping_data.orm_objects["providers"][provider_id]) - # update_issue_after_deleting() - _handle_mapping_policies(mapping_data=mapping_data) send_host_component_map_update_event(cluster=mapping_data.orm_objects["cluster"]) diff --git a/python/cm/api.py b/python/cm/api.py index 5e620395c2..ced521b348 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -16,7 +16,7 @@ import json from adcm_version import compare_prototype_versions -from core.types import CoreObjectDescriptor +from core.types import ADCMCoreType, CoreObjectDescriptor from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.exceptions import MultipleObjectsReturned @@ -46,6 +46,7 @@ update_hierarchy_issues, update_issue_after_deleting, ) +from cm.issue import check_hc as check_hostcomponent_issue from cm.logger import logger from cm.models import ( ADCM, @@ -589,12 +590,17 @@ def save_hc( host_component.save() host_component_list.append(host_component) - update_hierarchy_issues(cluster) + # HC may break + # We can't be 
sure this method is called after some sort of "check" + if check_hostcomponent_issue(cluster=cluster): + delete_issue( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT + ) + elif not cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT): + create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) - for provider in {host.provider for host in Host.objects.filter(cluster=cluster)}: - update_hierarchy_issues(provider) + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.id,)))) - update_issue_after_deleting() reset_hc_map() reset_objects_in_mm() From 6b91ef1f64b9f067aaf0db89cba671582ea4815f Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 15 Jul 2024 13:56:51 +0000 Subject: [PATCH 26/98] ADCM-5780 & ADCM-5783 & ADCM-5805 Rework concerns recalculation for host add/remove to/from cluster AND in `adcm_change_maintenance_mode` plugin --- .../executors/change_maintenance_mode.py | 8 ++- .../bundles/cluster_all_concerns/config.yaml | 11 +++- python/api_v2/tests/test_concerns.py | 57 ++++++++++++++++++- python/cm/api.py | 8 ++- python/cm/services/cluster.py | 23 ++++++-- 5 files changed, 97 insertions(+), 10 deletions(-) diff --git a/python/ansible_plugin/executors/change_maintenance_mode.py b/python/ansible_plugin/executors/change_maintenance_mode.py index 09b8b985dd..adab145a5d 100644 --- a/python/ansible_plugin/executors/change_maintenance_mode.py +++ b/python/ansible_plugin/executors/change_maintenance_mode.py @@ -13,8 +13,9 @@ from contextlib import suppress from typing import Any, Collection -from cm.issue import update_hierarchy_issues from cm.models import Host, MaintenanceMode +from cm.services.cluster import retrieve_clusters_topology +from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_objects_in_mm from cm.status_api import send_object_update_event from core.types import ADCMCoreType, 
CoreObjectDescriptor @@ -96,7 +97,10 @@ def __call__( update_fields=["maintenance_mode"] if isinstance(target_object, Host) else ["_maintenance_mode"] ) - update_hierarchy_issues(target_object.cluster) + if not value: + # In terms of concerns CHANGING and ON is the same, + # so recalculation is required only for turning it OFF + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((target_object.cluster_id,)))) with suppress(Exception): send_object_update_event(object_=target_object, changes={"maintenanceMode": target_object.maintenance_mode}) diff --git a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml index bf1548e435..de1088074a 100644 --- a/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml +++ b/python/api_v2/tests/bundles/cluster_all_concerns/config.yaml @@ -81,7 +81,6 @@ name: with_multiple_imports version: 2 - import: cluster_export: multibind: false @@ -95,3 +94,13 @@ component_1: config: *required_config component_2: + +- type: service + name: greedy + version: 4.3 + + config: *required_config + + components: + on_all: + constraint: [+] diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 87f888ecff..6f1908c948 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -37,7 +37,7 @@ from django.contrib.contenttypes.models import ContentType from django.db.models import Q from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED +from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_204_NO_CONTENT from api_v2.tests.base import BaseAPITestCase @@ -1114,3 +1114,58 @@ def test_concerns_changes_on_import(self) -> None: self.check_concerns(host_2, concerns=host_2_cons) self.check_concerns_of_control_objects() + + def test_concerns_dis_appearance_on_move_cluster_host(self) -> None: + # prepare + host_1 = 
self.add_host(self.provider, fqdn="host-1") + mapped_host = self.add_host(self.provider, fqdn="mapped-host", cluster=self.cluster) + + greedy_s = self.add_services_to_cluster(["greedy"], cluster=self.cluster).get() + on_all_c = greedy_s.servicecomponent_set.get(prototype__name="on_all") + + # find concerns + provider_config_con = self.provider.get_own_issue(ConcernCause.CONFIG) + host_1_cons = (provider_config_con, host_1.get_own_issue(ConcernCause.CONFIG)) + mapped_host_cons = (provider_config_con, mapped_host.get_own_issue(ConcernCause.CONFIG)) + greedy_s_con = greedy_s.get_own_issue(ConcernCause.CONFIG) + + self.set_hostcomponent(cluster=self.cluster, entries=[(mapped_host, on_all_c)]) + self.assertIsNone(self.cluster.get_own_issue(ConcernCause.HOSTCOMPONENT)) + + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + + # test + self.assertEqual( + self.client.v2[self.cluster, "hosts"].post(data={"hostId": host_1.id}).status_code, HTTP_201_CREATED + ) + + with self.subTest("Add Host To Cluster"): + hc_issue = self.cluster.get_own_issue(ConcernCause.HOSTCOMPONENT) + self.assertIsNotNone(hc_issue) + + self.check_concerns(self.provider, concerns=(provider_config_con,)) + self.check_concerns(host_1, concerns=host_1_cons) + self.check_concerns(mapped_host, concerns=(*cluster_own_cons, hc_issue, greedy_s_con, *mapped_host_cons)) + + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, hc_issue, greedy_s_con, *mapped_host_cons)) + self.check_concerns(greedy_s, concerns=(*cluster_own_cons, hc_issue, greedy_s_con, *mapped_host_cons)) + self.check_concerns(on_all_c, concerns=(*cluster_own_cons, hc_issue, greedy_s_con, *mapped_host_cons)) + + self.check_concerns_of_control_objects() + + self.assertEqual(self.client.v2[self.cluster, "hosts", host_1].delete().status_code, HTTP_204_NO_CONTENT) + + with self.subTest("Remove Host From Cluster"): + 
self.assertIsNone(self.cluster.get_own_issue(ConcernCause.HOSTCOMPONENT)) + + self.check_concerns(self.provider, concerns=(provider_config_con,)) + self.check_concerns(host_1, concerns=host_1_cons) + self.check_concerns(mapped_host, concerns=(*cluster_own_cons, greedy_s_con, *mapped_host_cons)) + + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, greedy_s_con, *mapped_host_cons)) + self.check_concerns(greedy_s, concerns=(*cluster_own_cons, greedy_s_con, *mapped_host_cons)) + self.check_concerns(on_all_c, concerns=(*cluster_own_cons, greedy_s_con, *mapped_host_cons)) + + self.check_concerns_of_control_objects() diff --git a/python/cm/api.py b/python/cm/api.py index ced521b348..39761bcfd8 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -294,10 +294,14 @@ def remove_host_from_cluster(host: Host) -> Host: for group in cluster.group_config.order_by("id"): group.hosts.remove(host) - update_hierarchy_issues(obj=host) remove_concern_from_object(object_=host, concern=CTX.lock) - update_hierarchy_issues(obj=cluster) + + if check_hostcomponent_issue(cluster): + delete_issue( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT + ) + re_apply_object_policy(apply_object=cluster) reset_hc_map() diff --git a/python/cm/services/cluster.py b/python/cm/services/cluster.py index 78209117d0..bc505be6cd 100644 --- a/python/cm/services/cluster.py +++ b/python/cm/services/cluster.py @@ -21,11 +21,12 @@ MaintenanceModeOfObjects, ObjectMaintenanceModeState, ) -from core.types import ClusterID, HostID, ShortObjectInfo +from core.types import ADCMCoreType, ClusterID, CoreObjectDescriptor, HostID, ShortObjectInfo from django.db.transaction import atomic from rbac.models import re_apply_object_policy -from cm.models import Cluster, ClusterObject, Host, HostComponent, ServiceComponent +from cm.models import Cluster, ClusterObject, ConcernCause, Host, HostComponent, ServiceComponent +from cm.services.concern import 
delete_issue class ClusterDB: @@ -97,12 +98,26 @@ def reset_hc_map(self) -> None: def perform_host_to_cluster_map( cluster_id: int, hosts: Collection[int], status_service: _StatusServerService ) -> Collection[int]: - from cm.issue import update_hierarchy_issues # avoiding circular imports + # this import should be resolved later: + # concerns management should be passed in here the same way as `status_service`, + # because it's a dependency that shouldn't be directly set + from cm.issue import check_hc, create_issue + from cm.services.concern.distribution import distribute_concern_on_related_objects with atomic(): add_hosts_to_cluster(cluster_id=cluster_id, hosts=hosts, db=ClusterDB) cluster = Cluster.objects.get(id=cluster_id) - update_hierarchy_issues(obj=cluster) + + if check_hc(cluster=cluster): + delete_issue( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT + ) + elif not cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT): + concern = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) + distribute_concern_on_related_objects( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), concern_id=concern.id + ) + re_apply_object_policy(apply_object=cluster) status_service.reset_hc_map() From 72fc7b2e8d1402af2578c8951110964e6c0ef1f5 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 16 Jul 2024 09:49:49 +0000 Subject: [PATCH 27/98] ADCM-5779 Change concerns calculation on service removal --- python/api_v2/service/utils.py | 5 ++--- python/api_v2/tests/test_concerns.py | 24 ++++++++++++++++++++++++ python/cm/api.py | 14 +++++++++++--- 3 files changed, 37 insertions(+), 6 deletions(-) diff --git a/python/api_v2/service/utils.py b/python/api_v2/service/utils.py index ff1ba6d4f2..0a3fda3352 100644 --- a/python/api_v2/service/utils.py +++ b/python/api_v2/service/utils.py @@ -53,14 +53,13 @@ def bulk_add_services_to_cluster(cluster: Cluster, prototypes: QuerySet[Prototyp 
components = ServiceComponent.objects.filter(cluster=cluster, service__in=services).select_related("prototype") bulk_init_config(objects=components) - new_concerns = recalculate_own_concerns_on_add_services( + recalculate_own_concerns_on_add_services( cluster=cluster, services=services.prefetch_related( "servicecomponent_set" ).all(), # refresh values from db to update `config` field ) - if new_concerns: # TODO: redistribute only new issues. See ADCM-5798 - redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) re_apply_object_policy(apply_object=cluster) reset_hc_map() diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 6f1908c948..621a057c28 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -1169,3 +1169,27 @@ def test_concerns_dis_appearance_on_move_cluster_host(self) -> None: self.check_concerns(on_all_c, concerns=(*cluster_own_cons, greedy_s_con, *mapped_host_cons)) self.check_concerns_of_control_objects() + + def test_concerns_on_service_deletion(self) -> None: + # prepare + greedy_s = self.add_services_to_cluster(["greedy"], cluster=self.cluster).get() + + dummy_s = self.add_services_to_cluster(["dummy"], cluster=self.cluster).get() + dummy_c = dummy_s.servicecomponent_set.get(prototype__name="same_dummy") + + # test + self.assertIsNotNone(self.cluster.get_own_issue(ConcernCause.HOSTCOMPONENT)) + + self.assertEqual(self.client.v2[greedy_s].delete().status_code, HTTP_204_NO_CONTENT) + + hc_issue = self.cluster.get_own_issue(ConcernCause.HOSTCOMPONENT) + self.assertIsNone(hc_issue) + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + + self.check_concerns(self.cluster, concerns=cluster_own_cons) + self.check_concerns(dummy_s, concerns=cluster_own_cons) + 
self.check_concerns(dummy_c, concerns=cluster_own_cons) + + self.check_concerns_of_control_objects() diff --git a/python/cm/api.py b/python/cm/api.py index 39761bcfd8..b02ab79745 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -234,8 +234,16 @@ def delete_service(service: ClusterObject) -> None: service_pk = service.pk service.delete() - update_issue_after_deleting() - update_hierarchy_issues(service.cluster) + cluster = service.cluster + if check_hostcomponent_issue(cluster=cluster): + delete_issue( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT + ) + elif cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT) is None: + concern = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) + distribute_concern_on_related_objects( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), concern_id=concern.id + ) keep_objects = defaultdict(set) for task in TaskLog.objects.filter( @@ -247,7 +255,7 @@ def delete_service(service: ClusterObject) -> None: for log in job.logstorage_set.all(): keep_objects[log.__class__].add(log.pk) - re_apply_object_policy(apply_object=service.cluster, keep_objects=keep_objects) + re_apply_object_policy(apply_object=cluster, keep_objects=keep_objects) reset_hc_map() on_commit(func=partial(send_delete_service_event, service_id=service_pk)) From 7bf3efecb0316e02e19cbbd2165db343c9599f89 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Tue, 16 Jul 2024 12:35:56 +0000 Subject: [PATCH 28/98] ADCM-5710 Rework audit for Task and Jobs Views --- .../api_v2/generic/action_host_group/audit.py | 39 +-- python/api_v2/job/views.py | 5 +- python/api_v2/task/views.py | 5 +- python/api_v2/tests/base.py | 38 +++ python/api_v2/tests/test_audit/test_task.py | 55 +--- python/api_v2/tests/test_jobs.py | 257 +++++++----------- python/api_v2/utils/audit.py | 140 +++++++++- python/cm/converters.py | 28 ++ 8 files changed, 323 insertions(+), 244 deletions(-) diff --git 
a/python/api_v2/generic/action_host_group/audit.py b/python/api_v2/generic/action_host_group/audit.py index ced22dabf1..1c0bba695d 100644 --- a/python/api_v2/generic/action_host_group/audit.py +++ b/python/api_v2/generic/action_host_group/audit.py @@ -18,9 +18,9 @@ from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObjectType -from cm.models import ActionHostGroup, Cluster, ClusterObject, Host, ServiceComponent +from cm.models import ActionHostGroup, Host -from api_v2.utils.audit import ExtractID, object_does_exist +from api_v2.utils.audit import ExtractID, get_audit_object_name, object_does_exist # hooks @@ -30,37 +30,16 @@ class ActionHostGroupAuditObjectCreator(IDBasedAuditObjectCreator): name_field = "prototype__display_name" def get_name(self, id_: str | int) -> str | None: - # retrieval of child names is not optimal, can be improved - # by avoiding prefetching object and just routing based on type - group = ActionHostGroup.objects.prefetch_related("object").filter(id=id_).first() - if not group: + try: + group_name, parent_object_id, parent_model_name = ( + ActionHostGroup.objects.filter(id=id_).values_list("name", "object_id", "object_type__model").first() + ) + except TypeError: # this error is returned to unpack None, which can return if the object is not found return None - parent = group.object - - # Also naming extraction can be unified maybe, but then no easy optimization probably - if isinstance(parent, Cluster): - names = (parent.name,) - elif isinstance(parent, ClusterObject): - names = ( - ClusterObject.objects.values_list("cluster__name", "prototype__display_name") - .filter(id=parent.id) - .first() - or () - ) - elif isinstance(parent, ServiceComponent): - names = ( - ServiceComponent.objects.values_list( - "cluster__name", "service__prototype__display_name", "prototype__display_name" - ) - .filter(id=parent.id) - .first() - or () - ) - else: - names = () + 
parent_name = get_audit_object_name(object_id=parent_object_id, model_name=parent_model_name) - return "/".join((*names, group.name)) + return "/".join((parent_name, group_name)) _extract_action_host_group = partial( diff --git a/python/api_v2/job/views.py b/python/api_v2/job/views.py index 6b50da6d5d..879c45f6fe 100644 --- a/python/api_v2/job/views.py +++ b/python/api_v2/job/views.py @@ -12,7 +12,7 @@ from adcm.permissions import VIEW_JOBLOG_PERMISSION from adcm.serializers import EmptySerializer -from audit.utils import audit +from audit.alt.api import audit_update from cm.models import JobLog from django.contrib.contenttypes.models import ContentType from drf_spectacular.utils import OpenApiParameter, OpenApiResponse, extend_schema, extend_schema_view @@ -32,6 +32,7 @@ from api_v2.job.permissions import JobPermissions from api_v2.job.serializers import JobRetrieveSerializer from api_v2.task.serializers import JobListSerializer +from api_v2.utils.audit import detect_object_for_job, set_job_name from api_v2.views import ADCMGenericViewSet @@ -92,7 +93,7 @@ def get_serializer_class(self): return JobListSerializer - @audit + @audit_update(name="{job_name} terminated", object_=detect_object_for_job).attach_hooks(on_collect=set_job_name) @action(methods=["post"], detail=True) def terminate(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG001, ARG002 job = self.get_object() diff --git a/python/api_v2/task/views.py b/python/api_v2/task/views.py index 3f4508eab2..40c4181797 100644 --- a/python/api_v2/task/views.py +++ b/python/api_v2/task/views.py @@ -11,7 +11,7 @@ # limitations under the License. 
from adcm.permissions import VIEW_TASKLOG_PERMISSION -from audit.utils import audit +from audit.alt.api import audit_update from cm.models import TaskLog from django.contrib.contenttypes.models import ContentType from django.http import HttpResponse @@ -37,6 +37,7 @@ from api_v2.task.filters import TaskFilter from api_v2.task.permissions import TaskPermissions from api_v2.task.serializers import TaskListSerializer +from api_v2.utils.audit import detect_object_for_task, set_task_name from api_v2.views import ADCMGenericViewSet @@ -112,7 +113,7 @@ def get_queryset(self, *args, **kwargs): queryset = queryset.exclude(object_type=ContentType.objects.get(app_label="cm", model="adcm")) return queryset - @audit + @audit_update(name="{task_name} cancelled", object_=detect_object_for_task).attach_hooks(on_collect=set_task_name) @action(methods=["post"], detail=True) def terminate(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG001, ARG002 task = self.get_object() diff --git a/python/api_v2/tests/base.py b/python/api_v2/tests/base.py index 2bf1d307c6..1255ed6cb9 100644 --- a/python/api_v2/tests/base.py +++ b/python/api_v2/tests/base.py @@ -21,6 +21,7 @@ from audit.models import AuditLog, AuditObjectType, AuditSession from cm.models import ( ADCM, + Action, ActionHostGroup, Bundle, Cluster, @@ -28,8 +29,12 @@ ConfigLog, Host, HostProvider, + JobLog, + JobStatus, ServiceComponent, + TaskLog, ) +from cm.tests.mocks.task_runner import RunTaskMock from django.conf import settings from django.http import HttpRequest from init_db import init @@ -209,3 +214,36 @@ def logout(self): request.session = engine.SessionStore() logout(request) self.cookies = SimpleCookie() + + def simulate_finished_task( + self, object_: Cluster | ClusterObject | ServiceComponent, action: Action + ) -> (TaskLog, JobLog): + with RunTaskMock() as run_task: + (self.client.v2[object_] / "actions" / action / "run").post( + data={"configuration": None, "isVerbose": True, "hostComponentMap": []} + 
) + + run_task.run() + run_task.target_task.refresh_from_db() + + return run_task.target_task, run_task.target_task.joblog_set.last() + + def simulate_running_task( + self, object_: Cluster | ClusterObject | ServiceComponent, action: Action + ) -> (TaskLog, JobLog): + with RunTaskMock() as run_task: + (self.client.v2[object_] / "actions" / action / "run").post( + data={"configuration": None, "isVerbose": True, "hostComponentMap": []} + ) + + run_task.run() + run_task.target_task.refresh_from_db() + task = run_task.target_task + job = task.joblog_set.last() + task.status = JobStatus.RUNNING + task.save(update_fields=["status"]) + job.status = JobStatus.RUNNING + job.pid = 5_000_000 + job.save(update_fields=["status", "pid"]) + + return task, job diff --git a/python/api_v2/tests/test_audit/test_task.py b/python/api_v2/tests/test_audit/test_task.py index 8f670783b5..5d1f03d71d 100644 --- a/python/api_v2/tests/test_audit/test_task.py +++ b/python/api_v2/tests/test_audit/test_task.py @@ -14,10 +14,7 @@ from cm.models import ( Action, - Cluster, - ClusterObject, JobLog, - JobStatus, ObjectType, Prototype, ServiceComponent, @@ -48,39 +45,6 @@ def setUp(self) -> None: self.set_hostcomponent(cluster=self.cluster_1, entries=[(host, self.component)]) self.component_action = Action.objects.get(prototype=self.component.prototype, name="action_1_comp_1") - def simulate_finished_task( - self, object_: Cluster | ClusterObject | ServiceComponent, action: Action - ) -> (TaskLog, JobLog): - with RunTaskMock() as run_task: - (self.client.v2[object_] / "actions" / action / "run").post( - data={"configuration": None, "isVerbose": True, "hostComponentMap": []} - ) - - run_task.run() - run_task.target_task.refresh_from_db() - - return run_task.target_task, run_task.target_task.joblog_set.last() - - def simulate_running_task( - self, object_: Cluster | ClusterObject | ServiceComponent, action: Action - ) -> (TaskLog, JobLog): - with RunTaskMock() as run_task: - (self.client.v2[object_] / 
"actions" / action / "run").post( - data={"configuration": None, "isVerbose": True, "hostComponentMap": []} - ) - - run_task.run() - run_task.target_task.refresh_from_db() - task = run_task.target_task - job = task.joblog_set.last() - task.status = JobStatus.RUNNING - task.save(update_fields=["status"]) - job.status = JobStatus.RUNNING - job.pid = 5_000_000 - job.save(update_fields=["status", "pid"]) - - return task, job - def test_run_action_success(self): with RunTaskMock() as run_task: response = (self.client.v2[self.cluster_1] / "actions" / self.cluster_action / "run").post( @@ -199,7 +163,6 @@ def test_terminate_job_not_found_fail(self): ) def test_terminate_job_denied(self): - # TODO: This test discovered an issue with creating a new audit object, this needs to be fixed _, job = self.simulate_running_task(object_=self.component, action=self.component_action) self.client.login(**self.test_user_credentials) @@ -211,10 +174,7 @@ def test_terminate_job_denied(self): operation_name=f"{self.component_action.display_name} terminated", operation_type="update", operation_result="denied", - audit_object__object_id=self.component.id, - audit_object__object_name="component_1", # TODO: should be "cluster_1/service_1/component_1" - audit_object__object_type="service component", # TODO: should be "component" - audit_object__is_deleted=False, + **self.prepare_audit_object_arguments(expected_object=self.component), user__username="test_user_username", ) @@ -244,12 +204,11 @@ def test_terminate_task_not_found_fail(self): operation_name="Task cancelled", operation_type="update", operation_result="fail", - audit_object__isnull=True, + **self.prepare_audit_object_arguments(expected_object=None), user__username="admin", ) def test_terminate_task_denied(self): - # TODO: This test discovered an issue with creating a new audit object, this needs to be fixed task, _ = self.simulate_running_task(object_=self.component, action=self.component_action) 
self.client.login(**self.test_user_credentials) @@ -261,9 +220,7 @@ def test_terminate_task_denied(self): operation_name=f"{self.component_action.display_name} cancelled", operation_type="update", operation_result="denied", - audit_object__object_id=self.component.id, - audit_object__object_name="component_1", # TODO: should be "cluster_1/service_1/component_1" - audit_object__object_type="service component", # TODO should be "component" + **self.prepare_audit_object_arguments(expected_object=self.component), user__username="test_user_username", ) @@ -283,7 +240,6 @@ def test_terminate_finished_job_fail(self): ) def test_terminate_finished_task_fail(self): - # TODO: This test discovered an issue with creating a new audit object, this needs to be fixed task, _ = self.simulate_finished_task(object_=self.service, action=self.service_action) with patch("cm.models.os.kill"): @@ -294,10 +250,7 @@ def test_terminate_finished_task_fail(self): operation_name=f"{self.service_action.display_name} cancelled", operation_type="update", operation_result="fail", - audit_object__object_id=self.service.id, - audit_object__object_name="service_1", # TODO: should be "cluster_1/service_1" - audit_object__object_type="cluster object", # TODO: should be "service" - audit_object__is_deleted=False, + **self.prepare_audit_object_arguments(expected_object=self.service), user__username="admin", ) diff --git a/python/api_v2/tests/test_jobs.py b/python/api_v2/tests/test_jobs.py index 700aeed68a..ca7b17dd49 100644 --- a/python/api_v2/tests/test_jobs.py +++ b/python/api_v2/tests/test_jobs.py @@ -10,23 +10,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import timedelta from unittest.mock import patch from cm.models import ( - ADCM, Action, - ActionType, JobLog, - JobStatus, LogStorage, - TaskLog, + ObjectType, + Prototype, + ServiceComponent, ) from django.conf import settings -from django.contrib.contenttypes.models import ContentType -from django.http import HttpResponse -from django.utils import timezone -from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_404_NOT_FOUND from api_v2.tests.base import BaseAPITestCase @@ -38,140 +32,84 @@ class TestJob(BaseAPITestCase): def setUp(self) -> None: super().setUp() - self.adcm = ADCM.objects.first() - self.action = Action.objects.create( - display_name="test_adcm_action", - prototype=self.adcm.prototype, - type=ActionType.JOB, - state_available="any", + self.cluster_1_action = Action.objects.get(prototype=self.cluster_1.prototype, name="action") + self.service = self.add_services_to_cluster(service_names=["service_1"], cluster=self.cluster_1)[0] + self.service_action = Action.objects.get(prototype=self.service.prototype, name="action") + self.host = self.add_host(provider=self.provider, fqdn="host-1", cluster=self.cluster_1) + component_prototype = Prototype.objects.get( + bundle=self.bundle_1, type=ObjectType.COMPONENT, name="component_1", parent=self.service.prototype ) - self.task = TaskLog.objects.create( - object_id=self.adcm.pk, - object_type=ContentType.objects.get(app_label="cm", model="adcm"), - start_date=timezone.now(), - finish_date=timezone.now(), - action=self.action, - ) - self.job_1 = JobLog.objects.create( - status=JobStatus.CREATED, - start_date=timezone.now(), - finish_date=timezone.now() + timedelta(days=1), - ) - self.job_2 = JobLog.objects.create( - status=JobStatus.RUNNING, - start_date=timezone.now() + timedelta(days=1), - finish_date=timezone.now() + timedelta(days=2), - task=self.task, - pid=9999, - allow_to_terminate=True, - ) - self.log_1 = LogStorage.objects.create( - 
job=self.job_1, - name="ansible", - type="stderr", - format="txt", - ) - - self.job_with_logs = JobLog.objects.create( - status=JobStatus.FAILED, - start_date=timezone.now() - timedelta(hours=2), - finish_date=timezone.now(), - ) - self.word_10_symbols = "logline908" - self.ansible_stdout_many_lines = LogStorage.objects.create( - job=self.job_with_logs, - name="ansible", - type="stdout", - format="txt", - body="\n".join(self.word_10_symbols for _ in range(200_000)), - ) - self.long_line = "word" * 1000 - self.short_line = "word" * 4 - self.ansible_stderr_long_lines = LogStorage.objects.create( - job=self.job_with_logs, - name="ansible", - type="stderr", - format="txt", - body=f"{self.long_line}\n{self.short_line}\n{self.long_line}\n" - f"{self.short_line}\n{self.short_line}\n{self.long_line}\n", - ) - many_lines_long_message = "\n".join( - ( - *[self.word_10_symbols for _ in range(200_000)], - "", - self.long_line, - self.short_line, - self.long_line, - "logline", - ) - ) - self.custom_log_long_and_many_lines = LogStorage.objects.create( - job=self.job_with_logs, - name="anythingelse", - type="custom", - format="txt", - body=many_lines_long_message, - ) - self.another_stdout_long_and_many_lines = LogStorage.objects.create( - job=self.job_with_logs, - name="anotherone", - type="stdout", - format="txt", - body=many_lines_long_message, - ) - self.long_one_liner_log = LogStorage.objects.create( - job=self.job_with_logs, - name="anotherone", - type="stderr", - format="txt", - body=many_lines_long_message.replace("\n", " "), + self.component = ServiceComponent.objects.get( + cluster=self.cluster_1, service=self.service, prototype=component_prototype ) + self.set_hostcomponent(cluster=self.cluster_1, entries=[(self.host, self.component)]) + self.component_action = Action.objects.get(prototype=self.component.prototype, name="action_1_comp_1") def test_job_list_success(self): - response: Response = (self.client.v2 / "jobs").get() + 
self.simulate_finished_task(object_=self.cluster_1, action=self.cluster_1_action) + + response = (self.client.v2 / "jobs").get() - self.assertEqual(len(response.data["results"]), 3) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(len(response.data["results"]), 1) def test_job_retrieve_success(self): - response: Response = self.client.v2[self.job_2].get() + _, job = self.simulate_finished_task(object_=self.service, action=self.service_action) + + response = self.client.v2[job].get() - self.assertEqual(response.data["id"], self.job_2.pk) self.assertEqual(response.status_code, HTTP_200_OK) + self.assertEqual(response.data["id"], job.pk) def test_job_retrieve_not_found_fail(self): - response: Response = (self.client.v2 / "jobs" / self.get_non_existent_pk(JobLog)).get() + self.simulate_finished_task(object_=self.component, action=self.component_action) + + response = (self.client.v2 / "jobs" / self.get_non_existent_pk(JobLog)).get() self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_job_log_list_success(self): - response: Response = self.client.v2[self.job_1, "logs"].get() + _, job = self.simulate_finished_task(object_=self.cluster_1, action=self.cluster_1_action) + + response = self.client.v2[job, "logs"].get() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 1) + self.assertEqual(len(response.json()), 2) def test_job_log_detail_success(self): - expected_truncated_line = ( - f"{self.long_line[:settings.STDOUT_STDERR_LOG_LINE_CUT_LENGTH]}{self.TRUNCATED_LOG_MESSAGE}" - ) + _, job = self.simulate_finished_task(object_=self.service, action=self.service_action) + log = job.logstorage_set.filter(type="stdout").last() + + self.long_line = "word" * 1000 + self.short_line = "word" * 4 with self.subTest("Many lines [CUT]"): - response = self.client.v2[self.ansible_stdout_many_lines].get() + log.body = "\n".join("logline908" for _ in range(200_000)) + log.save(update_fields=["body"]) + + response 
= self.client.v2[log].get() self.assertEqual(response.status_code, HTTP_200_OK) - log = response.json()["content"].splitlines() - self.assertEqual(log[0], self.TRUNCATED_LOG_MESSAGE) - self.assertEqual(log[-1], self.TRUNCATED_LOG_MESSAGE) - log_itself = log[1:-1] + content = response.json()["content"].splitlines() + self.assertEqual(content[0], self.TRUNCATED_LOG_MESSAGE) + self.assertEqual(content[-1], self.TRUNCATED_LOG_MESSAGE) + log_itself = content[1:-1] self.assertEqual(len(log_itself), settings.STDOUT_STDERR_LOG_CUT_LENGTH) - self.assertTrue(all(line == self.word_10_symbols for line in log_itself)) + self.assertTrue(all(line == "logline908" for line in log_itself)) with self.subTest("Long lines, less than cutoff [UNCUT]"): - response = self.client.v2[self.ansible_stderr_long_lines].get() + log.body = ( + f"{self.long_line}\n{self.short_line}\n{self.long_line}\n" + f"{self.short_line}\n{self.short_line}\n{self.long_line}\n" + ) + log.save(update_fields=["body"]) + + response = self.client.v2[log].get() + self.assertEqual(response.status_code, HTTP_200_OK) - log = response.json()["content"].splitlines() + content = response.json()["content"].splitlines() self.assertEqual( - log, + content, [ self.long_line, self.short_line, @@ -182,79 +120,84 @@ def test_job_log_detail_success(self): ], ) - with self.subTest("Custom log [UNCUT]"): - response = self.client.v2[self.custom_log_long_and_many_lines].get() - self.assertEqual(response.status_code, HTTP_200_OK) - log = response.json()["content"] - self.assertEqual(log, self.custom_log_long_and_many_lines.body) + with self.subTest("Long one line [CUT]"): + many_lines_long_message = "\n".join( + ( + *["logline908" for _ in range(200_000)], + "", + self.long_line, + self.short_line, + self.long_line, + "logline", + ) + ) + long_one_liner_log = many_lines_long_message.replace("\n", " ") + log.body = long_one_liner_log + log.save(update_fields=["body"]) - with self.subTest("Long both ways non-ansible stdout [CUT]"): - 
response = self.client.v2[self.another_stdout_long_and_many_lines].get() - self.assertEqual(response.status_code, HTTP_200_OK) - log = response.json()["content"].splitlines() - self.assertEqual(log[0], self.TRUNCATED_LOG_MESSAGE) - self.assertEqual(log[-1], self.TRUNCATED_LOG_MESSAGE) - expected_last_lines = [ - "", - expected_truncated_line, - self.short_line, - expected_truncated_line, - "logline", - ] - self.assertEqual(log[-6:-1], expected_last_lines) - main_log = log[1:-6] - self.assertEqual(len(main_log), settings.STDOUT_STDERR_LOG_CUT_LENGTH - 5) - self.assertTrue(all(line == self.word_10_symbols for line in main_log)) + response = self.client.v2[log].get() - with self.subTest("Long one line [CUT]"): - response = self.client.v2[self.long_one_liner_log].get() self.assertEqual(response.status_code, HTTP_200_OK) - log = response.json()["content"] + content = response.json()["content"] self.assertEqual( - log, + content, f"{self.TRUNCATED_LOG_MESSAGE}\n" - f"{self.long_one_liner_log.body[: settings.STDOUT_STDERR_LOG_LINE_CUT_LENGTH]}" + f"{long_one_liner_log[: settings.STDOUT_STDERR_LOG_LINE_CUT_LENGTH]}" f"{self.TRUNCATED_LOG_MESSAGE}\n" f"{self.TRUNCATED_LOG_MESSAGE}\n", ) def test_adcm_5212_retrieve_log_null_body_cut_success(self) -> None: - log_content = self.ansible_stdout_many_lines.body - self.ansible_stdout_many_lines.body = None - self.ansible_stdout_many_lines.save(update_fields=["body"]) + _, job = self.simulate_finished_task(object_=self.component, action=self.component_action) + log = job.logstorage_set.filter(type="stdout").last() + log_content = "\n".join("logline908" for _ in range(200_000)) with patch("api_v2.log_storage.serializers.extract_log_content_from_fs", return_value=log_content): - response = self.client.v2[self.ansible_stdout_many_lines].get() + response = self.client.v2[log].get() self.assertEqual(response.status_code, HTTP_200_OK) - log = response.json()["content"].splitlines() - self.assertEqual(log[0], self.TRUNCATED_LOG_MESSAGE) - 
self.assertEqual(log[-1], self.TRUNCATED_LOG_MESSAGE) - log_itself = log[1:-1] + content = response.json()["content"].splitlines() + self.assertEqual(content[0], self.TRUNCATED_LOG_MESSAGE) + self.assertEqual(content[-1], self.TRUNCATED_LOG_MESSAGE) + log_itself = content[1:-1] self.assertEqual(len(log_itself), settings.STDOUT_STDERR_LOG_CUT_LENGTH) - self.assertTrue(all(line == self.word_10_symbols for line in log_itself)) + self.assertTrue(all(line == "logline908" for line in log_itself)) def test_job_log_download_success(self): - response: Response = self.client.v2[self.log_1, "download"].get() + _, job = self.simulate_finished_task(object_=self.cluster_1, action=self.cluster_1_action) + log = job.logstorage_set.filter(type="stdout").last() + + response = self.client.v2[log, "download"].get() self.assertEqual(response.status_code, HTTP_200_OK) def test_adcm_5212_job_log_download_full_success(self) -> None: - response: HttpResponse = self.client.v2[self.ansible_stdout_many_lines, "download"].get() + _, job = self.simulate_finished_task(object_=self.service, action=self.service_action) + log = job.logstorage_set.filter(type="stdout").last() + body = "\n".join("logline908" for _ in range(200_000)) + log.body = body + log.save(update_fields=["body"]) + + response = self.client.v2[log, "download"].get() + self.assertEqual(response.status_code, HTTP_200_OK) - log = response.content.decode("utf-8") - self.assertNotIn(self.TRUNCATED_LOG_MESSAGE, log) - self.assertEqual(self.ansible_stdout_many_lines.body, log) + content = response.content.decode("utf-8") + self.assertNotIn(self.TRUNCATED_LOG_MESSAGE, content) + self.assertEqual(body, content) def test_job_log_not_found_download_fail(self): - response: Response = self.client.v2[self.job_1, "logs", self.get_non_existent_pk(LogStorage), "download"].get() + _, job = self.simulate_finished_task(object_=self.component, action=self.component_action) + + response = self.client.v2[job, "logs", 
self.get_non_existent_pk(LogStorage), "download"].get() self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_job_terminate_success(self): + _, job = self.simulate_running_task(object_=self.cluster_1, action=self.cluster_1_action) + with patch("cm.models.os.kill") as kill_mock: - response: Response = self.client.v2[self.job_2, "terminate"].post(data={}) + response = self.client.v2[job, "terminate"].post(data={}) self.assertEqual(response.status_code, HTTP_200_OK) kill_mock.assert_called() diff --git a/python/api_v2/utils/audit.py b/python/api_v2/utils/audit.py index c036c43286..c0c0f91fef 100644 --- a/python/api_v2/utils/audit.py +++ b/python/api_v2/utils/audit.py @@ -20,7 +20,20 @@ from audit.alt.hooks import AuditHook from audit.alt.object_retrievers import GeneralAuditObjectRetriever from audit.models import AuditObject, AuditObjectType -from cm.models import ADCM, Bundle, Cluster, ClusterObject, Host, HostProvider, Prototype, ServiceComponent +from cm.converters import core_type_to_model, model_name_to_audit_object_type, model_name_to_core_type +from cm.models import ( + ADCM, + ADCMCoreType, + Bundle, + Cluster, + ClusterObject, + Host, + HostProvider, + JobLog, + Prototype, + ServiceComponent, + TaskLog, +) from cm.utils import get_obj_type from django.core.handlers.wsgi import WSGIRequest from django.db.models import Model, Prefetch @@ -88,7 +101,6 @@ def get_name(self, id_: str | int) -> str | None: create_audit_policy_object = IDBasedAuditObjectCreator(model=Policy) create_audit_role_object = IDBasedAuditObjectCreator(model=Role) - bundle_from_lookup = GeneralAuditObjectRetriever( audit_object_type=AuditObjectType.BUNDLE, create_new=IDBasedAuditObjectCreator(model=Bundle), @@ -515,3 +527,127 @@ def set_service_name_from_object( ) context.name = context.name.format(service_name=service_name).strip() + + +def detect_object_for_job( + context: OperationAuditContext, # noqa: ARG001 + call_arguments: AuditedCallArguments, + result: Response | 
None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +) -> AuditObject | None: + try: + object_id, model_name = ( + JobLog.objects.filter(id=call_arguments["pk"]) + .values_list("task__object_id", "task__object_type__model") + .first() + ) + except TypeError: # this error is returned to unpack None, which can return if the object is not found + return None + + model = core_type_to_model(core_type=model_name_to_core_type(model_name=model_name)) + + if not model.objects.filter(id=object_id).exists(): + return None + + name = get_audit_object_name(object_id=object_id, model_name=model_name) + + return AuditObject.objects.filter( + object_id=object_id, + object_type=model_name_to_audit_object_type(model_name=model_name), + object_name=name, + ).first() + + +def detect_object_for_task( + context: OperationAuditContext, # noqa: ARG001 + call_arguments: AuditedCallArguments, + result: Response | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +) -> AuditObject | None: + try: + object_id, model_name = ( + TaskLog.objects.filter(id=call_arguments["pk"]).values_list("object_id", "object_type__model").first() + ) + except TypeError: # this error is returned to unpack None, which can return if the object is not found + return None + + model = core_type_to_model(core_type=model_name_to_core_type(model_name=model_name)) + + if not model.objects.filter(id=object_id).exists(): + return None + + name = get_audit_object_name(object_id=object_id, model_name=model_name) + + return AuditObject.objects.filter( + object_id=object_id, + object_type=model_name_to_audit_object_type(model_name=model_name), + object_name=name, + ).first() + + +def set_job_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +) -> None: + job_name = ( + JobLog.objects.select_related("task__action") + .values_list("task__action__display_name", flat=True) + 
.filter(id=call_arguments["pk"]) + .first() + ) + + if job_name is None: + job_name = "Job" + + context.name = context.name.format(job_name=job_name).strip() + + +def set_task_name( + context: OperationAuditContext, + call_arguments: AuditedCallArguments, + result: Result | None, # noqa: ARG001 + exception: Exception | None, # noqa: ARG001 +) -> None: + task_name = ( + TaskLog.objects.select_related("action") + .values_list("action__display_name", flat=True) + .filter(id=call_arguments["pk"]) + .first() + ) + + if task_name is None: + task_name = "Task" + + context.name = context.name.format(task_name=task_name).strip() + + +def get_audit_object_name(object_id: int, model_name: str) -> str: + core_type = model_name_to_core_type(model_name=model_name) + + match core_type: + case ADCMCoreType.CLUSTER: + names = Cluster.objects.values_list("name").filter(id=object_id).first() + case ADCMCoreType.SERVICE: + names = ( + ClusterObject.objects.values_list("cluster__name", "prototype__display_name") + .filter(id=object_id) + .first() + ) + case ADCMCoreType.COMPONENT: + names = ( + ServiceComponent.objects.values_list( + "cluster__name", "service__prototype__display_name", "prototype__display_name" + ) + .filter(id=object_id) + .first() + ) + case ADCMCoreType.HOSTPROVIDER: + names = HostProvider.objects.values_list("name").filter(id=object_id).first() + case ADCMCoreType.HOST: + names = Host.objects.values_list("fqdn").filter(id=object_id).first() + case _: + raise ValueError(f"Unsupported core type: {core_type}") + + return "/".join(names or ()) diff --git a/python/cm/converters.py b/python/cm/converters.py index 6d0167b847..8c2bea25e2 100644 --- a/python/cm/converters.py +++ b/python/cm/converters.py @@ -12,6 +12,7 @@ from typing import TypeAlias +from audit.models import AuditObjectType from core.types import ADCMCoreType, ADCMHostGroupType, ExtraActionTargetType from django.db.models import Model @@ -108,3 +109,30 @@ def model_to_action_target_type(model: type[Model]) 
-> ADCMCoreType | ExtraActio def orm_object_to_action_target_type(object_: CoreObject | ActionHostGroup) -> ADCMCoreType | ExtraActionTargetType: return model_to_action_target_type(model=object_.__class__) + + +def model_name_to_audit_object_type(model_name: str) -> AuditObjectType: + # model_name is `model` field from ContentType model or str().lower() + audit_object_type = _model_name_to_audit_object_type_map.get(model_name) + + if audit_object_type is None: + raise ValueError(f"Can't convert {model_name} to audit object type") + + return audit_object_type + + +_model_name_to_audit_object_type_map = { + "cluster": AuditObjectType.CLUSTER, + "clusterobject": AuditObjectType.SERVICE, + "servicecomponent": AuditObjectType.COMPONENT, + "host": AuditObjectType.HOST, + "hostprovider": AuditObjectType.PROVIDER, + "bundle": AuditObjectType.BUNDLE, + "prototype": AuditObjectType.PROTOTYPE, + "adcm": AuditObjectType.ADCM, + "user": AuditObjectType.USER, + "group": AuditObjectType.GROUP, + "role": AuditObjectType.ROLE, + "policy": AuditObjectType.POLICY, + "actionhostgroup": AuditObjectType.ACTION_HOST_GROUP, +} From 9c2577d9e57752683d907231ac20e006b1186383 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 19 Jul 2024 09:01:35 +0000 Subject: [PATCH 29/98] ADCM-5793 Alternative config issue check --- python/cm/services/concern/cases.py | 16 +++--- python/cm/services/concern/checks.py | 74 ++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 8 deletions(-) create mode 100644 python/cm/services/concern/checks.py diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index 3c26b3eead..12fb9b9c5e 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -16,15 +16,15 @@ from django.db.models import QuerySet from cm.issue import ( - check_config, check_hc, check_required_import, check_required_services, check_requires, create_issue, ) -from cm.models import Cluster, ClusterObject, ConcernCause 
+from cm.models import Cluster, ClusterObject, ConcernCause, ServiceComponent from cm.services.concern import delete_issue +from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import OwnObjectConcernMap @@ -32,7 +32,7 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce new_concerns: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) cluster_checks = ( - (ConcernCause.CONFIG, check_config), + (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), (ConcernCause.IMPORT, check_required_import), (ConcernCause.HOSTCOMPONENT, check_hc), (ConcernCause.SERVICE, check_required_services), @@ -57,7 +57,7 @@ def recalculate_own_concerns_on_add_services( new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) service_checks = ( - (ConcernCause.CONFIG, check_config), + (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), (ConcernCause.IMPORT, check_required_import), (ConcernCause.REQUIREMENT, check_requires), ) @@ -67,10 +67,10 @@ def recalculate_own_concerns_on_add_services( issue = create_issue(obj=service, issue_cause=concern_cause) new_concerns[ADCMCoreType.SERVICE][service.pk].add(issue.pk) - for component in service.servicecomponent_set.all(): - if not check_config(component): - issue = create_issue(obj=component, issue_cause=ConcernCause.CONFIG) - new_concerns[ADCMCoreType.COMPONENT][component.pk].add(issue.pk) + for component in ServiceComponent.objects.filter(service__in=services): + if object_configuration_has_issue(component): + issue = create_issue(obj=component, issue_cause=ConcernCause.CONFIG) + new_concerns[ADCMCoreType.COMPONENT][component.pk].add(issue.pk) # remove gone concerns if check_required_services(cluster=cluster): diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py new file mode 100644 index 0000000000..69c00d880b --- /dev/null +++ b/python/cm/services/concern/checks.py @@ 
-0,0 +1,74 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from operator import attrgetter +from typing import Iterable, TypeAlias + +from core.types import ConfigID, ObjectID + +from cm.models import ( + Cluster, + ClusterObject, + Host, + HostProvider, + ObjectConfig, + ServiceComponent, +) +from cm.services.config import retrieve_config_attr_pairs +from cm.services.config.spec import FlatSpec, retrieve_flat_spec_for_objects + +ObjectWithConfig: TypeAlias = Cluster | ClusterObject | ServiceComponent | HostProvider | Host +HasIssue: TypeAlias = bool + + +def object_configuration_has_issue(target: ObjectWithConfig) -> HasIssue: + config_spec = next(iter(retrieve_flat_spec_for_objects(prototypes=(target.prototype_id,)).values()), None) + if not config_spec: + return False + + return target.id in filter_objects_with_configuration_issues(config_spec, target) + + +def filter_objects_with_configuration_issues(config_spec: FlatSpec, *objects: ObjectWithConfig) -> Iterable[ObjectID]: + required_fields = tuple(name for name, spec in config_spec.items() if spec.required and spec.type != "group") + if not required_fields: + return () + + object_config_log_map: dict[int, ConfigID] = dict( + ObjectConfig.objects.values_list("id", "current").filter(id__in=map(attrgetter("config_id"), objects)) + ) + config_pairs = retrieve_config_attr_pairs(configurations=object_config_log_map.values()) + + objects_with_issues: deque[ObjectID] = deque() + for 
object_ in objects: + config, attr = config_pairs[object_config_log_map[object_.config_id]] + + for composite_name in required_fields: + group_name, field_name, *_ = composite_name.split("/") + if not field_name: + field_name = group_name + group_name = None + + if group_name: + if not attr.get(group_name, {}).get("active", False): + continue + + if config[group_name][field_name] is None: + objects_with_issues.append(object_.id) + break + + elif config[field_name] is None: + objects_with_issues.append(object_.id) + break + + return objects_with_issues From 08ba84b5bca35606c881cc3a47fbe9d6dfd4bfe0 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Mon, 22 Jul 2024 07:01:52 +0000 Subject: [PATCH 30/98] ADCM-5785: Rework delete concerns for delete host --- python/api_v2/tests/test_concerns.py | 47 ++++++++++++++++++++++++++++ python/cm/api.py | 2 +- 2 files changed, 48 insertions(+), 1 deletion(-) diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 621a057c28..0e71429e6b 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -17,12 +17,14 @@ from cm.models import ( Action, ADCMEntity, + Bundle, Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, + HostProvider, JobLog, ObjectType, Prototype, @@ -1193,3 +1195,48 @@ def test_concerns_on_service_deletion(self) -> None: self.check_concerns(dummy_c, concerns=cluster_own_cons) self.check_concerns_of_control_objects() + + def test_remove_provider(self): + host_1 = self.add_host_via_api(self.provider, fqdn="host1") + host_2 = self.add_host_via_api(self.provider, fqdn="host2") + provider_pk, host_1_pk, host_2_pk = self.provider.pk, host_1.pk, host_2.pk + another_provider = self.add_provider( + bundle=Bundle.objects.get(name="provider_with_concerns"), name="Concerned HP 2" + ) + + self.client.v2[host_1].delete() + self.client.v2[host_2].delete() + self.client.v2[self.provider].delete() + + 
self.assertFalse(ConcernItem.objects.filter(owner_id=host_1_pk, owner_type=Host.class_content_type)) + self.assertFalse(ConcernItem.objects.filter(owner_id=host_2_pk, owner_type=Host.class_content_type)) + self.assertFalse(ConcernItem.objects.filter(owner_id=provider_pk, owner_type=HostProvider.class_content_type)) + self.assertEqual( + ConcernItem.objects.filter( + owner_id=another_provider.pk, owner_type=HostProvider.class_content_type + ).count(), + 1, + ) + + def test_remove_host(self): + host_1 = self.add_host_via_api(self.provider, fqdn="host1") + host_2 = self.add_host_via_api(self.provider, fqdn="host2") + host_1_pk = host_1.pk + another_provider = self.add_provider( + bundle=Bundle.objects.get(name="provider_with_concerns"), name="Concerned HP 2" + ) + host_3 = self.add_host_via_api(another_provider, fqdn="host3") + + self.client.v2[host_1].delete() + + self.assertFalse(ConcernItem.objects.filter(owner_id=host_1_pk, owner_type=Host.class_content_type)) + + self.assertEqual(host_2.concerns.count(), 2) + self.assertEqual(host_3.concerns.count(), 2) + self.assertEqual(self.provider.concerns.count(), 1) + self.assertEqual(another_provider.concerns.count(), 1) + + def add_host_via_api(self, provider: HostProvider, fqdn: str) -> Host: + response = (self.client.v2 / "hosts").post(data={"hostprovider_id": provider.pk, "name": fqdn}) + self.assertEqual(response.status_code, HTTP_201_CREATED) + return Host.objects.get(fqdn=fqdn) diff --git a/python/cm/api.py b/python/cm/api.py index b02ab79745..35cfed5c20 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -226,7 +226,7 @@ def delete_host(host: Host, cancel_tasks: bool = True) -> None: host.delete() reset_hc_map() reset_objects_in_mm() - update_issue_after_deleting() + # update_issue_after_deleting() logger.info("host #%s is deleted", host_pk) From 1c587f1a2e8b5a1cf142312855f0334843ad3f16 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Mon, 22 Jul 2024 07:50:44 +0000 Subject: [PATCH 31/98] ADCM-5777: 
Rework creation concerns for add host, ADCM-5778: Rework creation concerns for add provider --- python/api_v2/tests/test_concerns.py | 130 ++++++++++++++++++++++++- python/cm/api.py | 27 +++-- python/cm/services/concern/cases.py | 10 +- python/cm/services/concern/flags.py | 1 - python/cm/services/maintenance_mode.py | 11 --- 5 files changed, 154 insertions(+), 25 deletions(-) diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 0e71429e6b..d0d29fda05 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -508,9 +508,9 @@ def check_concerns(self, object_: ADCMEntity, concerns: Iterable[ConcernItem]) - self.assertEqual(actual_amount, expected_amount, message) for concern in expected_concerns: - if concern not in expected_concerns: + if concern not in object_concerns: cur_concern = f"{concern.type} | {concern.cause} from {concern.owner}" - message = f"{cur_concern} not found in:\n{self.repr_concerns(concerns)}" + message = f"\n{cur_concern} not found in:\n{self.repr_concerns(object_concerns)}" self.assertIn(concern, object_concerns, message) def check_concerns_of_control_objects(self) -> None: @@ -1082,7 +1082,6 @@ def test_concerns_changes_on_import(self) -> None: cluster_own_cons = tuple( ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) ) - import_s_con = import_s.get_own_issue(ConcernCause.IMPORT) component_1_con = component_1.get_own_issue(ConcernCause.CONFIG) # test @@ -1107,7 +1106,12 @@ def test_concerns_changes_on_import(self) -> None: self.change_imports_via_api(import_s, imports=[{"source": {"type": "service", "id": export_service.id}}]) + # we need to reread it, so it will be correctly searched within the collection + import_s_con = import_s.get_own_issue(ConcernCause.IMPORT) + with self.subTest("Set 1/2 Required Imports On Service"): + self.assertIsNotNone(import_s_con) + self.check_concerns(self.cluster, 
concerns=(*cluster_own_cons, import_s_con, component_1_con, *host_1_cons)) self.check_concerns(import_s, concerns=(*cluster_own_cons, import_s_con, component_1_con, *host_1_cons)) self.check_concerns(component_1, concerns=(*cluster_own_cons, import_s_con, component_1_con)) @@ -1240,3 +1244,123 @@ def add_host_via_api(self, provider: HostProvider, fqdn: str) -> Host: response = (self.client.v2 / "hosts").post(data={"hostprovider_id": provider.pk, "name": fqdn}) self.assertEqual(response.status_code, HTTP_201_CREATED) return Host.objects.get(fqdn=fqdn) + + def test_host_config_issue(self): + host = self.add_host_via_api(self.provider, fqdn="host1") + + self.assertIsNotNone(host.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNotNone(self.provider.get_own_issue(ConcernCause.CONFIG)) + + # host config issue resolved, provider remains + self.change_config_via_api(host) + + self.assertIsNotNone(self.provider.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNone(host.get_own_issue(ConcernCause.CONFIG)) + + host_2 = self.add_host(self.provider, fqdn="host2") + host_2_config_issue = host_2.get_own_issue(ConcernCause.CONFIG) + self.assertIsNotNone(host_2_config_issue) + self.check_concerns(host_2, concerns=(host_2_config_issue, self.provider.get_own_issue(ConcernCause.CONFIG))) + + host_flag = host.concerns.filter( + type=ConcernType.FLAG, + owner_id=host.pk, + owner_type=host.content_type, + cause=ConcernCause.CONFIG, + ).first() + self.check_concerns(host, concerns=(self.provider.get_own_issue(ConcernCause.CONFIG), host_flag)) + + def test_two_hosts_config_issue_one_resolved(self): + host_1 = self.add_host_via_api(self.provider, fqdn="host1") + host_2 = self.add_host_via_api(self.provider, fqdn="host2") + + host_1_config_issue = host_1.get_own_issue(ConcernCause.CONFIG) + host_2_config_issue = host_2.get_own_issue(ConcernCause.CONFIG) + provider_config_issue = self.provider.get_own_issue(ConcernCause.CONFIG) + + self.assertIsNotNone(host_1_config_issue) + 
self.assertIsNotNone(host_2_config_issue) + self.assertIsNotNone(provider_config_issue) + + # host config issue resolved, provider remains + self.change_config_via_api(host_1) + + self.assertIsNotNone(self.provider.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNone(host_1.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNotNone(host_2.get_own_issue(ConcernCause.CONFIG)) + + host_1_flag = host_1.concerns.filter( + type=ConcernType.FLAG, + owner_id=host_1.pk, + owner_type=host_1.content_type, + cause=ConcernCause.CONFIG, + ).first() + + self.check_concerns(host_1, concerns=(provider_config_issue, host_1_flag)) + self.check_concerns(host_2, concerns=(provider_config_issue, host_2.get_own_issue(ConcernCause.CONFIG))) + + def test_host_config_issue_from_hostprovider(self): + host_1 = self.add_host_via_api(self.provider, fqdn="host1") + + host_config_issue = host_1.get_own_issue(ConcernCause.CONFIG) + provider_config_issue = self.provider.get_own_issue(ConcernCause.CONFIG) + + self.assertIsNotNone(host_config_issue) + self.assertIsNotNone(provider_config_issue) + + # hostprovider config issue resolved + self.change_config_via_api(self.provider) + + host_2 = self.add_host(self.provider, fqdn="host2") + + self.assertIsNone(self.provider.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNotNone(host_1.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNotNone(host_2.get_own_issue(ConcernCause.CONFIG)) + + provider_flag = self.provider.concerns.filter( + type=ConcernType.FLAG, + owner_id=self.provider.pk, + owner_type=self.provider.content_type, + cause=ConcernCause.CONFIG, + ).first() + + self.check_concerns( + host_1, + concerns=( + provider_flag, + host_1.get_own_issue(ConcernCause.CONFIG), + ), + ) + self.check_concerns( + host_2, + concerns=(host_2.get_own_issue(ConcernCause.CONFIG),), + ) + + def test_two_hosts_config_issue_from_hostprovider_resolved(self): + host_1 = self.add_host_via_api(self.provider, fqdn="host1") + host_2 = 
self.add_host_via_api(self.provider, fqdn="host2") + + host_1_config_issue = host_1.get_own_issue(ConcernCause.CONFIG) + host_2_config_issue = host_2.get_own_issue(ConcernCause.CONFIG) + provider_config_issue = self.provider.get_own_issue(ConcernCause.CONFIG) + + self.assertIsNotNone(host_1_config_issue) + self.assertIsNotNone(host_2_config_issue) + self.assertIsNotNone(provider_config_issue) + + # hostprovider config issue resolved + self.change_config_via_api(self.provider) + + self.assertIsNone(self.provider.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNotNone(host_1.get_own_issue(ConcernCause.CONFIG)) + self.assertIsNotNone(host_2.get_own_issue(ConcernCause.CONFIG)) + + provider_flag = self.provider.concerns.filter( + type=ConcernType.FLAG, + owner_id=self.provider.pk, + owner_type=self.provider.content_type, + cause=ConcernCause.CONFIG, + ).first() + + self.check_concerns(host_1, concerns=(provider_flag, host_1.get_own_issue(ConcernCause.CONFIG))) + self.check_concerns(host_2, concerns=(provider_flag, host_2.get_own_issue(ConcernCause.CONFIG))) diff --git a/python/cm/api.py b/python/cm/api.py index 35cfed5c20..656373894c 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -74,7 +74,8 @@ ) from cm.services.cluster import retrieve_clusters_topology from cm.services.concern import delete_issue -from cm.services.concern.cases import recalculate_own_concerns_on_add_clusters +from cm.services.concern.cases import recalculate_own_concerns_on_add_clusters, recalculate_own_concerns_on_add_hosts +from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.status.notify import reset_hc_map, reset_objects_in_mm @@ -137,7 +138,6 @@ def add_cluster(prototype: Prototype, name: str, description: str = "") -> Clust 
object_type=ContentType.objects.get_for_model(Cluster), ) - # update_hierarchy_issues(cluster) if recalculate_own_concerns_on_add_clusters(cluster): # TODO: redistribute only new issues. See ADCM-5798 redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) @@ -166,7 +166,15 @@ def add_host(prototype: Prototype, provider: HostProvider, fqdn: str, descriptio host.config = obj_conf host.save() add_concern_to_object(object_=host, concern=CTX.lock) - update_hierarchy_issues(host.provider) + + if concerns := recalculate_own_concerns_on_add_hosts(host): # TODO: redistribute only new issues. See ADCM-5798 + distribute_concern_on_related_objects( + owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), + concern_id=concerns[ADCMCoreType.HOST][host.id], + ) + if concern := provider.get_own_issue(ConcernCause.CONFIG): + host.concerns.add(concern) + re_apply_object_policy(provider) reset_hc_map() @@ -186,7 +194,13 @@ def add_host_provider(prototype: Prototype, name: str, description: str = ""): provider.config = obj_conf provider.save() add_concern_to_object(object_=provider, concern=CTX.lock) - update_hierarchy_issues(provider) + # update_hierarchy_issues(provider) + + if object_configuration_has_issue(provider): + concern = create_issue(obj=provider, issue_cause=ConcernCause.CONFIG) + distribute_concern_on_related_objects( + owner=CoreObjectDescriptor(id=provider.id, type=ADCMCoreType.HOSTPROVIDER), concern_id=concern.id + ) logger.info("host provider #%s %s is added", provider.pk, provider.name) @@ -451,11 +465,6 @@ def raise_outdated_config_flag_if_required(object_: MainObject): name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type ) distribute_concern_on_related_objects(owner=owner, concern_id=concern_id) - # update_hierarchy( - # concern=ConcernItem.objects.get( - # name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type - # ) - # ) def 
set_object_config_with_plugin(obj: ADCMEntity, config: dict, attr: dict) -> ConfigLog: diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index 12fb9b9c5e..e044d0f9ec 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -22,7 +22,7 @@ check_requires, create_issue, ) -from cm.models import Cluster, ClusterObject, ConcernCause, ServiceComponent +from cm.models import Cluster, ClusterObject, ConcernCause, Host, ServiceComponent from cm.services.concern import delete_issue from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import OwnObjectConcernMap @@ -83,3 +83,11 @@ def recalculate_own_concerns_on_add_services( ) return new_concerns + + +def recalculate_own_concerns_on_add_hosts(host: Host) -> OwnObjectConcernMap: + if object_configuration_has_issue(host): + issue = create_issue(obj=host, issue_cause=ConcernCause.CONFIG) + return {ADCMCoreType.HOST: {host.id: issue.id}} + + return {} diff --git a/python/cm/services/concern/flags.py b/python/cm/services/concern/flags.py index 77b1240dfa..b6a7178e29 100644 --- a/python/cm/services/concern/flags.py +++ b/python/cm/services/concern/flags.py @@ -129,7 +129,6 @@ def update_hierarchy_for_flag(flag: ConcernFlag, on_objects: Collection[CoreObje ): owner = CoreObjectDescriptor(id=concern.owner_id, type=model_name_to_core_type(concern.owner_type.model)) distribute_concern_on_related_objects(owner=owner, concern_id=concern.id) - # update_hierarchy(concern) def update_hierarchy(concern: ConcernItem) -> None: diff --git a/python/cm/services/maintenance_mode.py b/python/cm/services/maintenance_mode.py index 05c0e6460b..a2c93be4fd 100644 --- a/python/cm/services/maintenance_mode.py +++ b/python/cm/services/maintenance_mode.py @@ -54,17 +54,6 @@ def _change_mm_via_action( def _update_mm_hierarchy_issues(obj: Host | ClusterObject | ServiceComponent) -> None: 
redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((obj.cluster_id,)))) - - # if isinstance(obj, Host): - # update_hierarchy_issues(obj.provider) - # - # providers = {host_component.host.provider for host_component in HostComponent.objects.filter(cluster=obj.cluster)} - # for provider in providers: - # update_hierarchy_issues(provider) - # - # update_hierarchy_issues(obj.cluster) - # update_issue_after_deleting() - # _update_flags() reset_objects_in_mm() From 186b4ee4cf0d0425530eac823ae251dccf82ff97 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 24 Jul 2024 04:20:35 +0000 Subject: [PATCH 32/98] ADCM-5787 Concerns recalculation during upgrade rework --- python/cm/api.py | 15 +- python/cm/services/concern/cases.py | 72 +- .../test_inventory/test_before_upgrade.py | 10 +- python/cm/tests/test_upgrade.py | 4 +- python/cm/upgrade.py | 650 ++++++++++-------- 5 files changed, 446 insertions(+), 305 deletions(-) diff --git a/python/cm/api.py b/python/cm/api.py index 656373894c..6422c6ee91 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -74,7 +74,11 @@ ) from cm.services.cluster import retrieve_clusters_topology from cm.services.concern import delete_issue -from cm.services.concern.cases import recalculate_own_concerns_on_add_clusters, recalculate_own_concerns_on_add_hosts +from cm.services.concern.cases import ( + recalculate_own_concerns_on_add_clusters, + recalculate_own_concerns_on_add_hosts, + recalculate_own_concerns_on_add_services, +) from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag @@ -194,7 +198,6 @@ def add_host_provider(prototype: Prototype, name: str, description: str = ""): provider.config = obj_conf provider.save() add_concern_to_object(object_=provider, concern=CTX.lock) - # update_hierarchy_issues(provider) if 
object_configuration_has_issue(provider): concern = create_issue(obj=provider, issue_cause=ConcernCause.CONFIG) @@ -240,7 +243,7 @@ def delete_host(host: Host, cancel_tasks: bool = True) -> None: host.delete() reset_hc_map() reset_objects_in_mm() - # update_issue_after_deleting() + logger.info("host #%s is deleted", host_pk) @@ -360,7 +363,10 @@ def add_service_to_cluster(cluster: Cluster, proto: Prototype) -> ClusterObject: service.config = obj_conf service.save(update_fields=["config"]) add_components_to_service(cluster=cluster, service=service) - update_hierarchy_issues(obj=cluster) + + recalculate_own_concerns_on_add_services(cluster=cluster, services=(service,)) + redistribute_issues_and_flags(next(retrieve_clusters_topology((cluster.id,)))) + re_apply_object_policy(apply_object=cluster) reset_hc_map() @@ -381,7 +387,6 @@ def add_components_to_service(cluster: Cluster, service: ClusterObject) -> None: obj_conf = init_object_config(proto=comp, obj=service_component) service_component.config = obj_conf service_component.save(update_fields=["config"]) - update_hierarchy_issues(obj=service_component) def get_license(proto: Prototype) -> str | None: diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index e044d0f9ec..7afa8fd63a 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -11,9 +11,11 @@ # limitations under the License. 
from collections import defaultdict +from operator import attrgetter, itemgetter +from typing import Iterable from core.types import ADCMCoreType, CoreObjectDescriptor -from django.db.models import QuerySet +from django.contrib.contenttypes.models import ContentType from cm.issue import ( check_hc, @@ -22,7 +24,7 @@ check_requires, create_issue, ) -from cm.models import Cluster, ClusterObject, ConcernCause, Host, ServiceComponent +from cm.models import Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, ServiceComponent from cm.services.concern import delete_issue from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import OwnObjectConcernMap @@ -47,7 +49,7 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce def recalculate_own_concerns_on_add_services( - cluster: Cluster, services: QuerySet[ClusterObject] + cluster: Cluster, services: Iterable[ClusterObject] ) -> OwnObjectConcernMap: new_concerns: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) @@ -91,3 +93,67 @@ def recalculate_own_concerns_on_add_hosts(host: Host) -> OwnObjectConcernMap: return {ADCMCoreType.HOST: {host.id: issue.id}} return {} + + +def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: + cluster_checks = ( + (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), + (ConcernCause.IMPORT, check_required_import), + (ConcernCause.HOSTCOMPONENT, check_hc), + (ConcernCause.SERVICE, check_required_services), + ) + + existing_cluster_concern_causes = set( + ConcernItem.objects.values_list("cause", flat=True).filter( + owner_id=cluster.id, + owner_type=ContentType.objects.get_for_model(Cluster), + type=ConcernType.ISSUE, + cause__in=map(itemgetter(0), cluster_checks), + ) + ) + + for cause, check in cluster_checks: + if cause in existing_cluster_concern_causes: + continue + + if not check(cluster): + create_issue(obj=cluster, issue_cause=cause) + + 
service_checks = ( + (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), + (ConcernCause.IMPORT, check_required_import), + (ConcernCause.REQUIREMENT, check_requires), + ) + + services = tuple(ClusterObject.objects.select_related("prototype").filter(cluster=cluster)) + existing_service_concern_causes = set( + ConcernItem.objects.values_list("owner_id", "cause").filter( + owner_id__in=map(attrgetter("id"), services), + owner_type=ContentType.objects.get_for_model(ClusterObject), + type=ConcernType.ISSUE, + cause__in=map(itemgetter(0), service_checks), + ) + ) + for service in services: + for concern_cause, func in service_checks: + if (service.id, concern_cause) in existing_service_concern_causes: + continue + + if not func(service): + create_issue(obj=service, issue_cause=concern_cause) + + components_with_config_concerns = set( + ConcernItem.objects.values_list("owner_id", flat=True).filter( + owner_id__in=ServiceComponent.objects.values_list("id", flat=True).filter(service__in=services), + owner_type=ContentType.objects.get_for_model(ServiceComponent), + type=ConcernType.ISSUE, + cause=ConcernCause.CONFIG, + ) + ) + for component in ( + ServiceComponent.objects.select_related("prototype") + .filter(service__in=services) + .exclude(id__in=components_with_config_concerns) + ): + if object_configuration_has_issue(component): + create_issue(obj=component, issue_cause=ConcernCause.CONFIG) diff --git a/python/cm/tests/test_inventory/test_before_upgrade.py b/python/cm/tests/test_inventory/test_before_upgrade.py index 178971ca25..4ecbc44108 100644 --- a/python/cm/tests/test_inventory/test_before_upgrade.py +++ b/python/cm/tests/test_inventory/test_before_upgrade.py @@ -19,7 +19,7 @@ from cm.models import Action, ClusterObject, ObjectType, Prototype, ServiceComponent, Upgrade from cm.services.job.inventory import get_inventory_data from cm.tests.test_inventory.base import BaseInventoryTestCase -from cm.upgrade import bundle_switch, 
update_before_upgrade +from cm.upgrade import _update_before_upgrade, bundle_switch class TestBeforeUpgrade(BaseInventoryTestCase): @@ -56,7 +56,7 @@ def setUp(self) -> None: def test_provider_two_hosts(self): self.provider.before_upgrade["bundle_id"] = self.provider.prototype.bundle.pk - update_before_upgrade(obj=self.provider) + _update_before_upgrade(obj=self.provider) bundle_switch(obj=self.provider, upgrade=self.upgrade_for_provider) @@ -166,7 +166,7 @@ def test_2_components_2_hosts(self): ) self.cluster_1.before_upgrade["bundle_id"] = self.cluster_1.prototype.bundle.pk - update_before_upgrade(obj=self.cluster_1) + _update_before_upgrade(obj=self.cluster_1) bundle_switch(obj=self.cluster_1, upgrade=self.upgrade_for_cluster) @@ -314,7 +314,7 @@ def test_group_config_effect_on_before_upgrade(self) -> None: ) self.cluster_1.before_upgrade["bundle_id"] = self.cluster_1.prototype.bundle.pk - update_before_upgrade(obj=self.cluster_1) + _update_before_upgrade(obj=self.cluster_1) bundle_switch(obj=self.cluster_1, upgrade=self.upgrade_for_cluster) @@ -457,7 +457,7 @@ def test_adcm_5367_bug(self) -> None: ) self.cluster_1.before_upgrade["bundle_id"] = self.cluster_1.prototype.bundle.pk - update_before_upgrade(obj=self.cluster_1) + _update_before_upgrade(obj=self.cluster_1) bundle_switch(obj=self.cluster_1, upgrade=self.upgrade_for_cluster) diff --git a/python/cm/tests/test_upgrade.py b/python/cm/tests/test_upgrade.py index a60c895b0e..165e7669ca 100644 --- a/python/cm/tests/test_upgrade.py +++ b/python/cm/tests/test_upgrade.py @@ -37,7 +37,7 @@ Upgrade, ) from cm.tests.utils import gen_cluster -from cm.upgrade import bundle_revert, check_upgrade, do_upgrade, switch_components +from cm.upgrade import _switch_components, bundle_revert, check_upgrade, do_upgrade def cook_cluster_bundle(ver): @@ -520,7 +520,7 @@ def test_component(self): self.assertEqual(service_component_12.prototype.parent, service.prototype) new_service_proto = Prototype.objects.get(type="service", 
name="hadoop", bundle=bundle_2) - switch_components(cluster, service, new_service_proto) + _switch_components(cluster, service, new_service_proto) new_component_prototype_1 = Prototype.objects.get(name="server", type="component", parent=new_service_proto) service_component_21 = ServiceComponent.objects.get(cluster=cluster, service=service, prototype__name="server") diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index 0af92232ee..dc1bf80b2a 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -10,9 +10,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +from abc import ABC, abstractmethod +from collections import deque +from operator import itemgetter import functools from adcm_version import compare_prototype_versions +from core.types import ADCMCoreType, ClusterID, CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType from django.db import transaction from rbac.models import Policy @@ -31,8 +35,8 @@ is_version_suitable, save_hc, ) -from cm.errors import raise_adcm_ex -from cm.issue import update_hierarchy_issues +from cm.errors import AdcmEx +from cm.issue import check_config, create_issue from cm.logger import logger from cm.models import ( ADCMEntity, @@ -40,6 +44,9 @@ Cluster, ClusterBind, ClusterObject, + ConcernCause, + ConcernItem, + ConcernType, ConfigLog, GroupConfig, Host, @@ -52,12 +59,198 @@ ServiceComponent, Upgrade, ) +from cm.services.cluster import retrieve_clusters_topology +from cm.services.concern.cases import ( + recalculate_concerns_on_cluster_upgrade, +) +from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.job.action import ActionRunPayload, run_action from cm.status_api import send_prototype_and_state_update_event from cm.utils import obj_ref -def switch_object(obj: Host | ClusterObject, new_prototype: Prototype) -> None: +def check_upgrade(obj: Cluster | 
HostProvider, upgrade: Upgrade) -> tuple[bool, str]: + if obj.locked: + concerns = [concern.name or "Action lock" for concern in obj.concerns.order_by("id")] + + return False, f"{obj} has blocking concerns to address: {concerns}" + + success, msg = _check_upgrade_version(prototype=obj.prototype, upgrade=upgrade) + if not success: + return False, msg + + success, msg = _check_upgrade_edition(prototype=obj.prototype, upgrade=upgrade) + if not success: + return False, msg + + if not upgrade.allowed(obj=obj): + return False, "no available states" + + if obj.prototype.type == ObjectType.CLUSTER: + success, msg = _check_upgrade_import(obj=obj, upgrade=upgrade) + if not success: + return False, msg + + return True, "" + + +def get_upgrade(obj: Cluster | HostProvider, order=None) -> list[Upgrade]: + res = [] + for upgrade in Upgrade.objects.filter(bundle__name=obj.prototype.bundle.name): + success, _ = _check_upgrade_version(prototype=obj.prototype, upgrade=upgrade) + if not success: + continue + + success, _ = _check_upgrade_edition(prototype=obj.prototype, upgrade=upgrade) + if not success: + continue + + if obj.locked or not upgrade.allowed(obj=obj): + continue + + upgrade_proto = Prototype.objects.filter(bundle=upgrade.bundle, name=upgrade.bundle.name).first() + upgrade.license = upgrade_proto.license + res.append(upgrade) + + if order: + if "name" in order: + return sorted( + res, + key=functools.cmp_to_key(mycmp=lambda obj1, obj2: compare_prototype_versions(obj1.name, obj2.name)), + ) + + if "-name" in order: + return sorted( + res, + key=functools.cmp_to_key(mycmp=lambda obj1, obj2: compare_prototype_versions(obj2.name, obj2.name)), + ) + + return res + + +def do_upgrade( + obj: Cluster | HostProvider, + upgrade: Upgrade, + config: dict, + attr: dict, + hostcomponent: list, + verbose: bool = False, +) -> dict: + check_license(prototype=obj.prototype) + upgrade_prototype = Prototype.objects.filter( + bundle=upgrade.bundle, name=upgrade.bundle.name, 
type__in=[ObjectType.CLUSTER, ObjectType.PROVIDER] + ).first() + check_license(prototype=upgrade_prototype) + + success, msg = check_upgrade(obj=obj, upgrade=upgrade) + if not success: + raise AdcmEx(code="UPGRADE_ERROR", msg=msg) + + logger.info("upgrade %s version %s (upgrade #%s)", obj_ref(obj), obj.prototype.version, upgrade.id) + + task_id = None + + obj.before_upgrade["bundle_id"] = obj.prototype.bundle.pk + _update_before_upgrade(obj=obj) + + if not upgrade.action: + bundle_switch(obj=obj, upgrade=upgrade) + + if upgrade.state_on_success: + obj.state = upgrade.state_on_success + obj.save(update_fields=["state"]) + + send_prototype_and_state_update_event(object_=obj) + else: + task = run_action( + action=upgrade.action, + obj=obj, + payload=ActionRunPayload(conf=config, attr=attr, hostcomponent=hostcomponent, verbose=verbose), + ) + task_id = task.id + + obj.refresh_from_db() + + return {"id": obj.id, "upgradable": bool(get_upgrade(obj=obj)), "task_id": task_id} + + +def bundle_switch(obj: Cluster | HostProvider, upgrade: Upgrade) -> None: + if isinstance(obj, Cluster): + switch = _ClusterBundleSwitch(target=obj, upgrade=upgrade) + elif isinstance(obj, HostProvider): + switch = _HostProviderBundleSwitch(target=obj, upgrade=upgrade) + else: + raise AdcmEx(code="UPGRADE_ERROR", msg="can upgrade only cluster or host provider") + + switch.perform() + + +def bundle_revert(obj: Cluster | HostProvider) -> None: + upgraded_bundle = obj.prototype.bundle + old_bundle = Bundle.objects.get(pk=obj.before_upgrade["bundle_id"]) + old_proto = Prototype.objects.filter(bundle=old_bundle, name=old_bundle.name).first() + before_upgrade_hc = obj.before_upgrade.get("hc") + service_names = obj.before_upgrade.get("services") + + _revert_object(obj=obj, old_proto=old_proto) + + if isinstance(obj, Cluster): + for service_prototype in Prototype.objects.filter(bundle=old_bundle, type="service"): + service = ClusterObject.objects.filter(cluster=obj, 
prototype__name=service_prototype.name).first() + if not service: + continue + + _revert_object(obj=service, old_proto=service_prototype) + for component_prototype in Prototype.objects.filter( + bundle=old_bundle, parent=service_prototype, type="component" + ): + component = ServiceComponent.objects.filter( + cluster=obj, + service=service, + prototype__name=component_prototype.name, + ).first() + + if component: + _revert_object(obj=component, old_proto=component_prototype) + else: + component = ServiceComponent.objects.create( + cluster=obj, + service=service, + prototype=component_prototype, + ) + obj_conf = init_object_config(proto=component_prototype, obj=component) + component.config = obj_conf + component.save(update_fields=["config"]) + + ClusterObject.objects.filter(cluster=obj, prototype__bundle=upgraded_bundle).delete() + ServiceComponent.objects.filter(cluster=obj, prototype__bundle=upgraded_bundle).delete() + + for service_name in service_names: + prototype = Prototype.objects.get(bundle=old_bundle, name=service_name, type="service") + + if not ClusterObject.objects.filter(prototype=prototype, cluster=obj).exists(): + add_service_to_cluster(cluster=obj, proto=prototype) + + host_comp_list = [] + for hostcomponent in before_upgrade_hc: + host = Host.objects.get(fqdn=hostcomponent["host"], cluster=obj) + service = ClusterObject.objects.get(prototype__name=hostcomponent["service"], cluster=obj) + component = ServiceComponent.objects.get( + prototype__name=hostcomponent["component"], + cluster=obj, + service=service, + ) + host_comp_list.append((service, host, component)) + + save_hc(cluster=obj, host_comp_list=host_comp_list) + + if isinstance(obj, HostProvider): + for host in Host.objects.filter(provider=obj): + old_host_proto = Prototype.objects.get(bundle=old_bundle, type="host", name=host.prototype.name) + _revert_object(obj=host, old_proto=old_host_proto) + + +def _switch_object(obj: Host | ClusterObject, new_prototype: Prototype) -> None: 
logger.info("upgrade switch from %s to %s", proto_ref(prototype=obj.prototype), proto_ref(prototype=new_prototype)) old_prototype = obj.prototype @@ -67,27 +260,13 @@ def switch_object(obj: Host | ClusterObject, new_prototype: Prototype) -> None: switch_config(obj=obj, new_prototype=new_prototype, old_prototype=old_prototype) -def switch_services(upgrade: Upgrade, cluster: Cluster) -> None: - for service in ClusterObject.objects.filter(cluster=cluster): - check_license(prototype=service.prototype) - try: - new_prototype = Prototype.objects.get(bundle=upgrade.bundle, type="service", name=service.prototype.name) - check_license(prototype=new_prototype) - switch_object(obj=service, new_prototype=new_prototype) - switch_components(cluster=cluster, service=service, new_component_prototype=new_prototype) - except Prototype.DoesNotExist: - service.delete() - - switch_hc(obj=cluster, upgrade=upgrade) - - -def switch_components(cluster: Cluster, service: ClusterObject, new_component_prototype: Prototype) -> None: +def _switch_components(cluster: Cluster, service: ClusterObject, new_component_prototype: Prototype) -> None: for component in ServiceComponent.objects.filter(cluster=cluster, service=service): try: new_comp_prototype = Prototype.objects.get( parent=new_component_prototype, type="component", name=component.prototype.name ) - switch_object(obj=component, new_prototype=new_comp_prototype) + _switch_object(obj=component, new_prototype=new_comp_prototype) except Prototype.DoesNotExist: component.delete() @@ -98,13 +277,7 @@ def switch_components(cluster: Cluster, service: ClusterObject, new_component_pr make_object_config(obj=component, prototype=component_prototype) -def switch_hosts(upgrade: Upgrade, provider: HostProvider) -> None: - for prototype in Prototype.objects.filter(bundle=upgrade.bundle, type="host"): - for host in Host.objects.filter(provider=provider, prototype__name=prototype.name): - switch_object(host, prototype) - - -def 
check_upgrade_version(prototype: Prototype, upgrade: Upgrade) -> tuple[bool, str]: +def _check_upgrade_version(prototype: Prototype, upgrade: Upgrade) -> tuple[bool, str]: if upgrade.min_strict: if compare_prototype_versions(prototype.version, upgrade.min_version) <= 0: return ( @@ -136,7 +309,7 @@ def check_upgrade_version(prototype: Prototype, upgrade: Upgrade) -> tuple[bool, return True, "" -def check_upgrade_edition(prototype: Prototype, upgrade: Upgrade) -> tuple[bool, str]: +def _check_upgrade_edition(prototype: Prototype, upgrade: Upgrade) -> tuple[bool, str]: if upgrade.from_edition == "any": return True, "" @@ -146,20 +319,7 @@ def check_upgrade_edition(prototype: Prototype, upgrade: Upgrade) -> tuple[bool, return True, "" -def check_upgrade_state(obj: Cluster | HostProvider, upgrade: Upgrade) -> tuple[bool, str]: - if obj.locked: - return False, "object is locked" - - if upgrade.allowed(obj): - return True, "" - else: # noqa: RET505 - return False, "no available states" - - -def check_upgrade_import( - obj: Cluster, - upgrade: Upgrade, -) -> tuple[bool, str]: +def _check_upgrade_import(obj: Cluster, upgrade: Upgrade) -> tuple[bool, str]: for cbind in ClusterBind.objects.filter(cluster=obj): export = cbind.source_service if cbind.source_service else cbind.source_cluster import_obj = cbind.service if cbind.service else cbind.cluster @@ -216,125 +376,7 @@ def check_upgrade_import( return True, "" -def check_upgrade(obj: Cluster | HostProvider, upgrade: Upgrade) -> tuple[bool, str]: - if obj.locked: - concerns = [concern.name or "Action lock" for concern in obj.concerns.order_by("id")] - - return False, f"{obj} has blocking concerns to address: {concerns}" - - success, msg = check_upgrade_version(prototype=obj.prototype, upgrade=upgrade) - if not success: - return False, msg - - success, msg = check_upgrade_edition(prototype=obj.prototype, upgrade=upgrade) - if not success: - return False, msg - - if not upgrade.allowed(obj=obj): - return False, "no available 
states" - - if obj.prototype.type == ObjectType.CLUSTER: - success, msg = check_upgrade_import(obj=obj, upgrade=upgrade) - if not success: - return False, msg - - return True, "" - - -def switch_hc(obj: Cluster, upgrade: Upgrade) -> None: - for hostcomponent in HostComponent.objects.filter(cluster=obj): - service_prototype = Prototype.objects.filter( - bundle=upgrade.bundle, - type="service", - name=hostcomponent.service.prototype.name, - ).first() - if not service_prototype: - hostcomponent.delete() - - continue - - if not Prototype.objects.filter( - parent=service_prototype, - type="component", - name=hostcomponent.component.prototype.name, - ).first(): - hostcomponent.delete() - - continue - - -def get_upgrade(obj: Cluster | HostProvider, order=None) -> list[Upgrade]: - res = [] - for upgrade in Upgrade.objects.filter(bundle__name=obj.prototype.bundle.name): - success, _ = check_upgrade_version(prototype=obj.prototype, upgrade=upgrade) - if not success: - continue - - success, _ = check_upgrade_edition(prototype=obj.prototype, upgrade=upgrade) - if not success: - continue - - if obj.locked or not upgrade.allowed(obj=obj): - continue - - upgrade_proto = Prototype.objects.filter(bundle=upgrade.bundle, name=upgrade.bundle.name).first() - upgrade.license = upgrade_proto.license - res.append(upgrade) - - if order: - if "name" in order: - return sorted( - res, - key=functools.cmp_to_key(mycmp=lambda obj1, obj2: compare_prototype_versions(obj1.name, obj2.name)), - ) - - if "-name" in order: - return sorted( - res, - key=functools.cmp_to_key(mycmp=lambda obj1, obj2: compare_prototype_versions(obj2.name, obj2.name)), - ) - - return res - - -def re_apply_policy_for_upgrade(obj: Cluster | HostProvider) -> None: - obj_type_map = {obj: ContentType.objects.get_for_model(obj)} - - if isinstance(obj, Cluster): - for service in ClusterObject.objects.filter(cluster=obj): - obj_type_map[service] = ContentType.objects.get_for_model(service) - for component in 
ServiceComponent.objects.filter(cluster=obj, service=service): - obj_type_map[component] = ContentType.objects.get_for_model(component) - elif isinstance(obj, HostProvider): - for host in Host.objects.filter(provider=obj): - obj_type_map[host] = ContentType.objects.get_for_model(host) - - for policy_object, content_type in obj_type_map.items(): - for policy in Policy.objects.filter(object__object_id=policy_object.id, object__content_type=content_type): - policy.apply() - - -def update_components_after_bundle_switch(cluster: Cluster, upgrade: Upgrade) -> None: - if upgrade.action and upgrade.action.hostcomponentmap: - logger.info("update component from %s after upgrade with hc_acl", cluster) - for hc_acl in upgrade.action.hostcomponentmap: - proto_service = Prototype.objects.filter( - type="service", - bundle=upgrade.bundle, - name=hc_acl["service"], - ).first() - if not proto_service: - continue - - try: - service = ClusterObject.objects.get(cluster=cluster, prototype=proto_service) - if not ServiceComponent.objects.filter(cluster=cluster, service=service).exists(): - add_components_to_service(cluster=cluster, service=service) - except ClusterObject.DoesNotExist: - add_service_to_cluster(cluster=cluster, proto=proto_service) - - -def revert_object(obj: ADCMEntity, old_proto: Prototype) -> None: +def _revert_object(obj: ADCMEntity, old_proto: Prototype) -> None: if obj.prototype == old_proto: return @@ -354,72 +396,7 @@ def revert_object(obj: ADCMEntity, old_proto: Prototype) -> None: obj.save(update_fields=["prototype", "config", "state", "before_upgrade"]) -def bundle_revert(obj: Cluster | HostProvider) -> None: - upgraded_bundle = obj.prototype.bundle - old_bundle = Bundle.objects.get(pk=obj.before_upgrade["bundle_id"]) - old_proto = Prototype.objects.filter(bundle=old_bundle, name=old_bundle.name).first() - before_upgrade_hc = obj.before_upgrade.get("hc") - service_names = obj.before_upgrade.get("services") - - revert_object(obj=obj, old_proto=old_proto) - - if 
isinstance(obj, Cluster): - for service_prototype in Prototype.objects.filter(bundle=old_bundle, type="service"): - service = ClusterObject.objects.filter(cluster=obj, prototype__name=service_prototype.name).first() - if not service: - continue - - revert_object(obj=service, old_proto=service_prototype) - for component_prototype in Prototype.objects.filter( - bundle=old_bundle, parent=service_prototype, type="component" - ): - component = ServiceComponent.objects.filter( - cluster=obj, - service=service, - prototype__name=component_prototype.name, - ).first() - - if component: - revert_object(obj=component, old_proto=component_prototype) - else: - component = ServiceComponent.objects.create( - cluster=obj, - service=service, - prototype=component_prototype, - ) - obj_conf = init_object_config(proto=component_prototype, obj=component) - component.config = obj_conf - component.save(update_fields=["config"]) - - ClusterObject.objects.filter(cluster=obj, prototype__bundle=upgraded_bundle).delete() - ServiceComponent.objects.filter(cluster=obj, prototype__bundle=upgraded_bundle).delete() - - for service_name in service_names: - prototype = Prototype.objects.get(bundle=old_bundle, name=service_name, type="service") - - if not ClusterObject.objects.filter(prototype=prototype, cluster=obj).exists(): - add_service_to_cluster(cluster=obj, proto=prototype) - - host_comp_list = [] - for hostcomponent in before_upgrade_hc: - host = Host.objects.get(fqdn=hostcomponent["host"], cluster=obj) - service = ClusterObject.objects.get(prototype__name=hostcomponent["service"], cluster=obj) - component = ServiceComponent.objects.get( - prototype__name=hostcomponent["component"], - cluster=obj, - service=service, - ) - host_comp_list.append((service, host, component)) - - save_hc(cluster=obj, host_comp_list=host_comp_list) - - if isinstance(obj, HostProvider): - for host in Host.objects.filter(provider=obj): - old_host_proto = Prototype.objects.get(bundle=old_bundle, type="host", 
name=host.prototype.name) - revert_object(obj=host, old_proto=old_host_proto) - - -def set_before_upgrade(obj: ADCMEntity) -> None: +def _set_before_upgrade(obj: ADCMEntity) -> None: obj.before_upgrade["state"] = obj.state if obj.config: obj.before_upgrade["config_id"] = obj.config.current @@ -449,92 +426,185 @@ def set_before_upgrade(obj: ADCMEntity) -> None: obj.save(update_fields=["before_upgrade"]) -def update_before_upgrade(obj: Cluster | HostProvider) -> None: - set_before_upgrade(obj=obj) +def _update_before_upgrade(obj: Cluster | HostProvider) -> None: + _set_before_upgrade(obj=obj) if isinstance(obj, Cluster): for service in ClusterObject.objects.filter(cluster=obj): - set_before_upgrade(obj=service) + _set_before_upgrade(obj=service) for component in ServiceComponent.objects.filter(service=service, cluster=obj): - set_before_upgrade(obj=component) + _set_before_upgrade(obj=component) if isinstance(obj, HostProvider): for host in Host.objects.filter(provider=obj): - set_before_upgrade(obj=host) + _set_before_upgrade(obj=host) -def do_upgrade( - obj: Cluster | HostProvider, - upgrade: Upgrade, - config: dict, - attr: dict, - hostcomponent: list, - verbose: bool = False, -) -> dict: - check_license(prototype=obj.prototype) - upgrade_prototype = Prototype.objects.filter( - bundle=upgrade.bundle, name=upgrade.bundle.name, type__in=[ObjectType.CLUSTER, ObjectType.PROVIDER] - ).first() - check_license(prototype=upgrade_prototype) +class _BundleSwitch(ABC): + def __init__(self, target: Cluster | HostProvider, upgrade: Upgrade): + self._target = target + self._upgrade = upgrade - success, msg = check_upgrade(obj=obj, upgrade=upgrade) - if not success: - return raise_adcm_ex("UPGRADE_ERROR", msg) + def perform(self) -> None: + with transaction.atomic(): + old_prototype = self._target.prototype + new_prototype = Prototype.objects.get( + bundle_id=self._upgrade.bundle_id, type__in=(ObjectType.CLUSTER, ObjectType.PROVIDER) + ) + self._target.prototype = new_prototype 
+ self._target.save(update_fields=["prototype"]) + switch_config(obj=self._target, new_prototype=new_prototype, old_prototype=old_prototype) - logger.info("upgrade %s version %s (upgrade #%s)", obj_ref(obj), obj.prototype.version, upgrade.id) + self._target.refresh_from_db() - task_id = None + self._upgrade_children(old_prototype=old_prototype, new_prototype=new_prototype) + self._update_concerns() - obj.before_upgrade["bundle_id"] = obj.prototype.bundle.pk - update_before_upgrade(obj=obj) + for policy_object, content_type in self._get_objects_map_for_policy_update().items(): + for policy in Policy.objects.filter( + object__object_id=policy_object.id, object__content_type=content_type + ): + policy.apply() - if not upgrade.action: - bundle_switch(obj=obj, upgrade=upgrade) + logger.info("upgrade %s OK to version %s", obj_ref(obj=self._target), new_prototype.version) - if upgrade.state_on_success: - obj.state = upgrade.state_on_success - obj.save(update_fields=["state"]) + @abstractmethod + def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) -> None: + ... - send_prototype_and_state_update_event(object_=obj) - else: - task = run_action( - action=upgrade.action, - obj=obj, - payload=ActionRunPayload(conf=config, attr=attr, hostcomponent=hostcomponent, verbose=verbose), + @abstractmethod + def _update_concerns(self) -> None: + ... + + @abstractmethod + def _get_objects_map_for_policy_update(self) -> dict[ADCMEntity, ContentType]: + ... 
+ + +class _ClusterBundleSwitch(_BundleSwitch): + def __init__(self, target: Cluster, upgrade: Upgrade): + super().__init__(target, upgrade) + + def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) -> None: + for service in ClusterObject.objects.select_related("prototype").filter(cluster=self._target): + check_license(prototype=service.prototype) + try: + new_service_prototype = Prototype.objects.get( + bundle_id=self._upgrade.bundle_id, type="service", name=service.prototype.name + ) + check_license(prototype=new_service_prototype) + _switch_object(obj=service, new_prototype=new_service_prototype) + _switch_components(cluster=self._target, service=service, new_component_prototype=new_service_prototype) + except Prototype.DoesNotExist: + service.delete() + + # remove HC entries whose components don't exist anymore + existing_names: set[tuple[str, str]] = set( + Prototype.objects.values_list("parent__name", "name").filter( + bundle_id=self._upgrade.bundle_id, type="component" + ) ) + entries_to_delete = deque() + for hc_id, service_name, component_name in HostComponent.objects.values_list( + "id", "service__prototype__name", "component__prototype__name" + ).filter(cluster=self._target): + if (service_name, component_name) not in existing_names: + entries_to_delete.append(hc_id) + + HostComponent.objects.filter(id__in=entries_to_delete).delete() + + if old_prototype.allow_maintenance_mode != new_prototype.allow_maintenance_mode: + Host.objects.filter(cluster=self._target).update(maintenance_mode=MaintenanceMode.OFF) + + if self._upgrade.action and self._upgrade.action.hostcomponentmap: + logger.info("update component from %s after upgrade with hc_acl", self._target) + services_in_new_hc = set(map(itemgetter("service"), self._upgrade.action.hostcomponentmap)) + for proto_service in Prototype.objects.filter( + type="service", + bundle_id=self._upgrade.bundle_id, + name__in=services_in_new_hc, + ): + # probably 
operations below can be performed in bulk for speed improvement + try: + service = ClusterObject.objects.select_related("prototype").get( + cluster=self._target, prototype=proto_service + ) + except ClusterObject.DoesNotExist: + # this code was taken from service creation from `cm.api` skipping checks, concerns, etc. + check_license(prototype=proto_service) + service = ClusterObject.objects.create(cluster=self._target, prototype=proto_service) + service.config = init_object_config(proto=proto_service, obj=service) + service.save(update_fields=["config"]) - obj.refresh_from_db() + if not ServiceComponent.objects.filter(cluster=self._target, service=service).exists(): + add_components_to_service(cluster=self._target, service=service) - return {"id": obj.id, "upgradable": bool(get_upgrade(obj=obj)), "task_id": task_id} + def _update_concerns(self) -> None: + recalculate_concerns_on_cluster_upgrade(cluster=self._target) + redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((self._target.id,)))) + def _get_objects_map_for_policy_update(self) -> dict[Cluster | ClusterObject | ServiceComponent, ContentType]: + obj_type_map = {self._target: ContentType.objects.get_for_model(Cluster)} -def bundle_switch(obj: Cluster | HostProvider, upgrade: Upgrade) -> None: - new_prototype = None - old_prototype = obj.prototype - if old_prototype.type == "cluster": - new_prototype = Prototype.objects.get(bundle=upgrade.bundle, type="cluster") - elif old_prototype.type == "provider": - new_prototype = Prototype.objects.get(bundle=upgrade.bundle, type="provider") - else: - raise_adcm_ex("UPGRADE_ERROR", "can upgrade only cluster or host provider") + service_content_type = ContentType.objects.get_for_model(ClusterObject) + for service in ClusterObject.objects.filter(cluster=self._target): + obj_type_map[service] = service_content_type - with transaction.atomic(): - obj.prototype = new_prototype - obj.save(update_fields=["prototype"]) - switch_config(obj=obj, 
new_prototype=new_prototype, old_prototype=old_prototype) + component_content_type = ContentType.objects.get_for_model(ServiceComponent) + for component in ServiceComponent.objects.filter(cluster=self._target): + obj_type_map[component] = component_content_type - if obj.prototype.type == "cluster": - switch_services(upgrade=upgrade, cluster=obj) - if old_prototype.allow_maintenance_mode != new_prototype.allow_maintenance_mode: - Host.objects.filter(cluster=obj).update(maintenance_mode=MaintenanceMode.OFF) - elif obj.prototype.type == "provider": - switch_hosts(upgrade=upgrade, provider=obj) + return obj_type_map - update_hierarchy_issues(obj=obj) - if isinstance(obj, Cluster): - update_components_after_bundle_switch(cluster=obj, upgrade=upgrade) - obj.refresh_from_db() - re_apply_policy_for_upgrade(obj=obj) - logger.info("upgrade %s OK to version %s", obj_ref(obj=obj), obj.prototype.version) +class _HostProviderBundleSwitch(_BundleSwitch): + def __init__(self, target: HostProvider, upgrade: Upgrade): + super().__init__(target, upgrade) + + def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) -> None: # noqa: ARG002 + for prototype in Prototype.objects.filter(bundle_id=self._upgrade.bundle_id, type="host"): + for host in Host.objects.filter(provider=self._target, prototype__name=prototype.name): + _switch_object(host, prototype) + + def _update_concerns(self) -> None: + if not self._target.get_own_issue(ConcernCause.CONFIG) and not check_config(self._target): + concern = create_issue(obj=self._target, issue_cause=ConcernCause.CONFIG) + distribute_concern_on_related_objects( + owner=CoreObjectDescriptor(id=self._target.id, type=ADCMCoreType.HOSTPROVIDER), concern_id=concern.id + ) + + clusters_for_redistribution: set[ClusterID] = set() + m2m_model = Host.concerns.through + host_own_concerns_to_link = deque() + + for host in ( + Host.objects.select_related("prototype__bundle") + .filter(provider=self._target) + .exclude( + 
id__in=ConcernItem.objects.values_list("owner_id", flat=True).filter( + owner_type=ContentType.objects.get_for_model(Host), + type=ConcernType.ISSUE, + cause=ConcernCause.CONFIG, + ) + ) + ): + if not check_config(host): + concern = create_issue(obj=host, issue_cause=ConcernCause.CONFIG) + clusters_for_redistribution.add(host.cluster_id) + host_own_concerns_to_link.append(m2m_model(host_id=host.id, concernitem_id=concern.id)) + + m2m_model.objects.bulk_create(objs=host_own_concerns_to_link) + + clusters_for_redistribution -= {None} + if clusters_for_redistribution: + for topology in retrieve_clusters_topology(cluster_ids=clusters_for_redistribution): + redistribute_issues_and_flags(topology=topology) + + def _get_objects_map_for_policy_update(self) -> dict[HostProvider | Host, ContentType]: + obj_type_map = {self._target: ContentType.objects.get_for_model(HostProvider)} + + host_content_type = ContentType.objects.get_for_model(Host) + for host in Host.objects.filter(provider=self._target): + obj_type_map[host] = host_content_type + + return obj_type_map From 4e2a7018406e9c5b430e9aa29ef01f8282896ac3 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 24 Jul 2024 06:19:30 +0000 Subject: [PATCH 33/98] ADCM-5797 Rework `cm.issue.check_requires` as `service_requirements_has_issue` --- python/api_v2/tests/test_concerns.py | 63 ++++++++++++++++++++++++++++ python/cm/services/concern/cases.py | 16 +++---- python/cm/services/concern/checks.py | 49 +++++++++++++++++++++- 3 files changed, 115 insertions(+), 13 deletions(-) diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index d0d29fda05..a4549c289b 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -1364,3 +1364,66 @@ def test_two_hosts_config_issue_from_hostprovider_resolved(self): self.check_concerns(host_1, concerns=(provider_flag, host_1.get_own_issue(ConcernCause.CONFIG))) self.check_concerns(host_2, concerns=(provider_flag, 
host_2.get_own_issue(ConcernCause.CONFIG))) + + def test_dis_appearance_of_require_concern_on_service(self) -> None: + require_dummy_proto_id = Prototype.objects.get( + bundle_id=self.cluster.prototype.bundle_id, type="service", name="require_dummy_service" + ).id + + response = self.client.v2[self.cluster, "services"].post(data={"prototypeId": require_dummy_proto_id}) + self.assertEqual(response.status_code, HTTP_201_CREATED) + + require_dummy_s = ClusterObject.objects.get(id=response.json()["id"]) + sir_c = require_dummy_s.servicecomponent_set.get(prototype__name="sir") + silent_c = require_dummy_s.servicecomponent_set.get(prototype__name="silent") + + requirement_con = require_dummy_s.get_own_issue(ConcernCause.REQUIREMENT) + component_config_con = sir_c.get_own_issue(ConcernCause.CONFIG) + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + expected_concerns = (*cluster_own_cons, requirement_con, component_config_con) + + with self.subTest("Appeared On Add"): + self.assertIsNotNone(requirement_con) + self.check_concerns(self.cluster, concerns=expected_concerns) + self.check_concerns(require_dummy_s, concerns=expected_concerns) + self.check_concerns(sir_c, concerns=expected_concerns) + self.check_concerns(silent_c, concerns=(*cluster_own_cons, requirement_con)) + + service_proto_id = Prototype.objects.get( + bundle_id=self.cluster.prototype.bundle_id, type="service", name="required" + ).id + self.assertEqual( + self.client.v2[self.cluster, "services"].post(data={"prototypeId": service_proto_id}).status_code, + HTTP_201_CREATED, + ) + + # SERVICE concern on cluster is gone, need to reread + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + expected_concerns = (*cluster_own_cons, requirement_con, component_config_con) + + with self.subTest("Stayed On Unrelated Service Add"): + 
self.assertIsNotNone(require_dummy_s.get_own_issue(ConcernCause.REQUIREMENT)) + self.check_concerns(self.cluster, concerns=expected_concerns) + self.check_concerns(require_dummy_s, concerns=expected_concerns) + self.check_concerns(sir_c, concerns=expected_concerns) + self.check_concerns(silent_c, concerns=(*cluster_own_cons, requirement_con)) + + dummy_proto_id = Prototype.objects.get( + bundle_id=self.cluster.prototype.bundle_id, type="service", name="dummy" + ).id + response = self.client.v2[self.cluster, "services"].post(data={"prototypeId": dummy_proto_id}) + self.assertEqual(response.status_code, HTTP_201_CREATED) + + dummy_s = ClusterObject.objects.get(id=response.json()["id"]) + + with self.subTest("Disappeared On Required Service Add"): + self.assertIsNone(require_dummy_s.get_own_issue(ConcernCause.REQUIREMENT)) + self.check_concerns(self.cluster, concerns=(*cluster_own_cons, component_config_con)) + self.check_concerns(require_dummy_s, concerns=(*cluster_own_cons, component_config_con)) + self.check_concerns(sir_c, concerns=(*cluster_own_cons, component_config_con)) + self.check_concerns(silent_c, concerns=cluster_own_cons) + self.check_concerns(dummy_s, concerns=cluster_own_cons) diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index 7afa8fd63a..c6de8d47f7 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -17,16 +17,10 @@ from core.types import ADCMCoreType, CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType -from cm.issue import ( - check_hc, - check_required_import, - check_required_services, - check_requires, - create_issue, -) +from cm.issue import check_hc, check_required_import, check_required_services, create_issue from cm.models import Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, ServiceComponent from cm.services.concern import delete_issue -from cm.services.concern.checks import object_configuration_has_issue 
+from cm.services.concern.checks import object_configuration_has_issue, service_requirements_has_issue from cm.services.concern.distribution import OwnObjectConcernMap @@ -61,7 +55,7 @@ def recalculate_own_concerns_on_add_services( service_checks = ( (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), (ConcernCause.IMPORT, check_required_import), - (ConcernCause.REQUIREMENT, check_requires), + (ConcernCause.REQUIREMENT, lambda obj: not service_requirements_has_issue(obj)), ) for service in services: for concern_cause, func in service_checks: @@ -79,7 +73,7 @@ def recalculate_own_concerns_on_add_services( delete_issue(owner=CoreObjectDescriptor(type=ADCMCoreType.CLUSTER, id=cluster.pk), cause=ConcernCause.SERVICE) for service in cluster.clusterobject_set.exclude(pk__in=(service.pk for service in services)): - if check_requires(service=service): + if not service_requirements_has_issue(service=service): delete_issue( owner=CoreObjectDescriptor(type=ADCMCoreType.SERVICE, id=service.pk), cause=ConcernCause.REQUIREMENT ) @@ -122,7 +116,7 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: service_checks = ( (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), (ConcernCause.IMPORT, check_required_import), - (ConcernCause.REQUIREMENT, check_requires), + (ConcernCause.REQUIREMENT, lambda obj: not service_requirements_has_issue(obj)), ) services = tuple(ClusterObject.objects.select_related("prototype").filter(cluster=cluster)) diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 69c00d880b..a9c39dd2e1 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -12,9 +12,9 @@ from collections import deque from operator import attrgetter -from typing import Iterable, TypeAlias +from typing import Iterable, Literal, NamedTuple, TypeAlias -from core.types import ConfigID, ObjectID +from core.types import ClusterID, ConfigID, ObjectID 
from cm.models import ( Cluster, @@ -31,6 +31,11 @@ HasIssue: TypeAlias = bool +class MissingRequirement(NamedTuple): + type: Literal["service", "component"] + name: str + + def object_configuration_has_issue(target: ObjectWithConfig) -> HasIssue: config_spec = next(iter(retrieve_flat_spec_for_objects(prototypes=(target.prototype_id,)).values()), None) if not config_spec: @@ -72,3 +77,43 @@ def filter_objects_with_configuration_issues(config_spec: FlatSpec, *objects: Ob break return objects_with_issues + + +def service_requirements_has_issue(service: ClusterObject) -> HasIssue: + return bool(find_unsatisfied_requirements(cluster_id=service.cluster_id, requires=service.prototype.requires)) + + +def find_unsatisfied_requirements( + cluster_id: ClusterID, requires: list[dict[Literal["service", "component"], str]] +) -> tuple[MissingRequirement, ...]: + if not requires: + return () + + names_of_required_services: set[str] = set() + required_components: set[tuple[str, str]] = set() + + for requirement in requires: + service_name = requirement["service"] + + if component_name := requirement.get("component"): + required_components.add((service_name, component_name)) + else: + names_of_required_services.add(service_name) + + missing_requirements = deque() + + if names_of_required_services: + for missing_service_name in names_of_required_services.difference( + ClusterObject.objects.values_list("prototype__name", flat=True).filter(cluster_id=cluster_id) + ): + missing_requirements.append(MissingRequirement(type="service", name=missing_service_name)) + + if required_components: + for _, missing_component_name in required_components.difference( + ServiceComponent.objects.values_list("service__prototype__name", "prototype__name").filter( + cluster_id=cluster_id + ) + ): + missing_requirements.append(MissingRequirement(type="component", name=missing_component_name)) + + return tuple(missing_requirements) From e4781a927bd71ee0a9e8abaaf522175cbd8736d5 Mon Sep 17 00:00:00 2001 
From: Artem Starovoitov Date: Thu, 25 Jul 2024 09:43:39 +0000 Subject: [PATCH 34/98] ADCM-5794: Rework the issue detection function for cause IMPORT --- python/api_v2/tests/test_concerns.py | 9 +++++++++ python/cm/api.py | 5 ++--- python/cm/services/concern/cases.py | 16 ++++++++++------ python/cm/services/concern/checks.py | 25 +++++++++++++++++++++++++ 4 files changed, 46 insertions(+), 9 deletions(-) diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index a4549c289b..621e4a365c 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -254,12 +254,17 @@ def _check_concerns(self, object_: Cluster | ClusterObject | ServiceComponent, e def test_import_concern_resolved_after_saving_import(self): import_cluster = self.add_cluster(bundle=self.required_import_bundle, name="required_import_cluster") + unused_import_cluster = self.add_cluster(bundle=self.required_import_bundle, name="unused_import_cluster") export_cluster = self.cluster_1 response: Response = self.client.v2[import_cluster].get() self.assertEqual(len(response.json()["concerns"]), 1) self.assertEqual(import_cluster.concerns.count(), 1) + response: Response = self.client.v2[unused_import_cluster].get() + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertEqual(import_cluster.concerns.count(), 1) + self.client.v2[import_cluster, "imports"].post( data=[{"source": {"id": export_cluster.pk, "type": ObjectType.CLUSTER}}], ) @@ -268,6 +273,10 @@ def test_import_concern_resolved_after_saving_import(self): self.assertEqual(len(response.json()["concerns"]), 0) self.assertEqual(import_cluster.concerns.count(), 0) + response: Response = self.client.v2[unused_import_cluster].get() + self.assertEqual(len(response.json()["concerns"]), 1) + self.assertEqual(unused_import_cluster.concerns.count(), 1) + def test_non_required_import_do_not_raises_concern(self): 
self.assertGreater(PrototypeImport.objects.filter(prototype=self.cluster_2.prototype).count(), 0) diff --git a/python/cm/api.py b/python/cm/api.py index 6422c6ee91..9873bdbe34 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -39,7 +39,6 @@ check_bound_components, check_component_constraint, check_hc_requires, - check_required_import, check_service_requires, create_issue, remove_concern_from_object, @@ -79,7 +78,7 @@ recalculate_own_concerns_on_add_hosts, recalculate_own_concerns_on_add_services, ) -from cm.services.concern.checks import object_configuration_has_issue +from cm.services.concern.checks import object_configuration_has_issue, object_imports_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.status.notify import reset_hc_map, reset_objects_in_mm @@ -891,7 +890,7 @@ def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: list[ logger.info("bind %s to %s", obj_ref(obj=export_obj), obj_ref(obj=import_obj)) import_target = CoreObjectDescriptor(id=import_obj.id, type=orm_object_to_core_type(import_obj)) - if check_required_import(obj=import_obj): + if not object_imports_has_issue(target=import_obj): delete_issue(owner=import_target, cause=ConcernCause.IMPORT) elif not import_obj.get_own_issue(ConcernCause.IMPORT): concern = create_issue(obj=import_obj, issue_cause=ConcernCause.IMPORT) diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index c6de8d47f7..97ecc2b31a 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -17,10 +17,14 @@ from core.types import ADCMCoreType, CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType -from cm.issue import check_hc, check_required_import, check_required_services, create_issue +from cm.issue import check_hc, check_required_services, create_issue 
from cm.models import Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, ServiceComponent from cm.services.concern import delete_issue -from cm.services.concern.checks import object_configuration_has_issue, service_requirements_has_issue +from cm.services.concern.checks import ( + object_configuration_has_issue, + object_imports_has_issue, + service_requirements_has_issue, +) from cm.services.concern.distribution import OwnObjectConcernMap @@ -29,7 +33,7 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce cluster_checks = ( (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, check_required_import), + (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), (ConcernCause.HOSTCOMPONENT, check_hc), (ConcernCause.SERVICE, check_required_services), ) @@ -54,7 +58,7 @@ def recalculate_own_concerns_on_add_services( service_checks = ( (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, check_required_import), + (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), (ConcernCause.REQUIREMENT, lambda obj: not service_requirements_has_issue(obj)), ) for service in services: @@ -92,7 +96,7 @@ def recalculate_own_concerns_on_add_hosts(host: Host) -> OwnObjectConcernMap: def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: cluster_checks = ( (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, check_required_import), + (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), (ConcernCause.HOSTCOMPONENT, check_hc), (ConcernCause.SERVICE, check_required_services), ) @@ -115,7 +119,7 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: service_checks = ( (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, check_required_import), + (ConcernCause.IMPORT, lambda obj: 
not object_imports_has_issue(obj)), (ConcernCause.REQUIREMENT, lambda obj: not service_requirements_has_issue(obj)), ) diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index a9c39dd2e1..54301fd480 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -15,13 +15,16 @@ from typing import Iterable, Literal, NamedTuple, TypeAlias from core.types import ClusterID, ConfigID, ObjectID +from django.db.models import Q from cm.models import ( Cluster, + ClusterBind, ClusterObject, Host, HostProvider, ObjectConfig, + PrototypeImport, ServiceComponent, ) from cm.services.config import retrieve_config_attr_pairs @@ -44,6 +47,28 @@ def object_configuration_has_issue(target: ObjectWithConfig) -> HasIssue: return target.id in filter_objects_with_configuration_issues(config_spec, target) +def object_imports_has_issue(target: Cluster | ClusterObject) -> HasIssue: + prototype_id = target.prototype_id + prototype_imports = PrototypeImport.objects.filter(prototype_id=prototype_id) + required_import_names = set(prototype_imports.values_list("name", flat=True).filter(required=True)) + + if not required_import_names: + return False + + if not any(prototype_imports.values_list("required", flat=True)): + return False + + for cluster_name, service_name in ClusterBind.objects.values_list( + "source_cluster__prototype__name", "source_service__prototype__name" + ).filter(Q(cluster__prototype_id=prototype_id) | Q(service__prototype_id=prototype_id)): + if service_name: + required_import_names -= {service_name} + elif cluster_name: + required_import_names -= {cluster_name} + + return required_import_names != set() + + def filter_objects_with_configuration_issues(config_spec: FlatSpec, *objects: ObjectWithConfig) -> Iterable[ObjectID]: required_fields = tuple(name for name, spec in config_spec.items() if spec.required and spec.type != "group") if not required_fields: From 3a0c13e9db64700be8db116f2ac86837efb2f900 Mon 
Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Thu, 25 Jul 2024 11:00:18 +0000 Subject: [PATCH 35/98] ADCM-5795: Rework the issue detection function for cause SERVICE --- python/cm/services/concern/cases.py | 5 +++-- python/cm/services/concern/checks.py | 13 +++++++++++ python/cm/tests/test_issue.py | 33 +++++++++++++++++++++++++++- 3 files changed, 48 insertions(+), 3 deletions(-) diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index 97ecc2b31a..bdc32da063 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -22,6 +22,7 @@ from cm.services.concern import delete_issue from cm.services.concern.checks import ( object_configuration_has_issue, + object_has_required_services_issue, object_imports_has_issue, service_requirements_has_issue, ) @@ -35,7 +36,7 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), (ConcernCause.HOSTCOMPONENT, check_hc), - (ConcernCause.SERVICE, check_required_services), + (ConcernCause.SERVICE, lambda obj: not object_has_required_services_issue(obj)), ) for cause, check in cluster_checks: @@ -73,7 +74,7 @@ def recalculate_own_concerns_on_add_services( new_concerns[ADCMCoreType.COMPONENT][component.pk].add(issue.pk) # remove gone concerns - if check_required_services(cluster=cluster): + if not object_has_required_services_issue(cluster=cluster): delete_issue(owner=CoreObjectDescriptor(type=ADCMCoreType.CLUSTER, id=cluster.pk), cause=ConcernCause.SERVICE) for service in cluster.clusterobject_set.exclude(pk__in=(service.pk for service in services)): diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 54301fd480..856846e11c 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -24,6 +24,7 @@ Host, 
HostProvider, ObjectConfig, + Prototype, PrototypeImport, ServiceComponent, ) @@ -69,6 +70,18 @@ def object_imports_has_issue(target: Cluster | ClusterObject) -> HasIssue: return required_import_names != set() +def object_has_required_services_issue(cluster: Cluster) -> HasIssue: + bundle_id = cluster.prototype.bundle_id + + required_protos = Prototype.objects.filter(bundle_id=bundle_id, type="service", required=True) + + if (required_count := required_protos.count()) == 0: + return False + + existing_required_objects = ClusterObject.objects.filter(cluster=cluster, prototype__in=required_protos) + return existing_required_objects.count() != required_count + + def filter_objects_with_configuration_issues(config_spec: FlatSpec, *objects: ObjectWithConfig) -> Iterable[ObjectID]: required_fields = tuple(name for name, spec in config_spec.items() if spec.required and spec.type != "group") if not required_fields: diff --git a/python/cm/tests/test_issue.py b/python/cm/tests/test_issue.py index cb0d2c621c..755e8e1d78 100644 --- a/python/cm/tests/test_issue.py +++ b/python/cm/tests/test_issue.py @@ -33,12 +33,14 @@ ClusterBind, ClusterObject, ConcernCause, + ObjectType, Prototype, PrototypeImport, ) from cm.services.cluster import perform_host_to_cluster_map +from cm.services.concern.checks import object_has_required_services_issue from cm.services.status import notify -from cm.tests.utils import gen_job_log, gen_service, gen_task_log, generate_hierarchy +from cm.tests.utils import gen_cluster, gen_job_log, gen_service, gen_task_log, generate_hierarchy mock_issue_check_map = { ConcernCause.CONFIG: lambda x: False, @@ -135,6 +137,35 @@ def test_required_service_issue(self): self.assertEqual(cluster_issue.cause, ConcernCause.SERVICE) self.assertEqual(cluster_issue.reason["placeholder"]["target"]["name"], service_prototype.name) + def test_issue_detection_on_service(self): + cluster_2 = gen_cluster( + prototype=Prototype.objects.filter(type=ObjectType.CLUSTER, 
bundle=self.cluster.prototype.bundle).first() + ) + Prototype.objects.create( + type="service", bundle=self.cluster.prototype.bundle, required=False, name="required service" + ) + + with self.subTest("Clusters have no required services"): + self.assertFalse(object_has_required_services_issue(self.cluster)) + self.assertFalse(object_has_required_services_issue(cluster_2)) + + with self.subTest("Clusters have required services"): + prototype = Prototype.objects.create( + type="service", bundle=self.cluster.prototype.bundle, required=True, name="required service" + ) + self.assertTrue(object_has_required_services_issue(self.cluster)) + self.assertTrue(object_has_required_services_issue(cluster_2)) + + with self.subTest("Clusters have required services and the service is added to one of them cluster"): + service = add_service_to_cluster(self.cluster, prototype) + self.assertFalse(object_has_required_services_issue(self.cluster)) + self.assertTrue(object_has_required_services_issue(cluster_2)) + + with self.subTest("Clusters have no required services after prototype deleted"): + service.delete() + self.assertTrue(object_has_required_services_issue(self.cluster)) + self.assertTrue(object_has_required_services_issue(cluster_2)) + class RemoveIssueTest(BaseTestCase): def setUp(self) -> None: From 57673e800813eb1107a7455cb4f08f6c4208f02f Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Tue, 30 Jul 2024 13:51:54 +0300 Subject: [PATCH 36/98] ADCM-5822: remove mm effect on concerns --- python/api/tests/test_component.py | 8 +- python/api/tests/test_host.py | 3 +- python/api_v2/tests/test_concerns.py | 345 ++++++++------------- python/cm/hierarchy.py | 10 +- python/cm/services/concern/distribution.py | 144 ++------- python/cm/services/maintenance_mode.py | 19 +- 6 files changed, 165 insertions(+), 364 deletions(-) diff --git a/python/api/tests/test_component.py b/python/api/tests/test_component.py index d3952f3a61..46b219cea5 100644 --- a/python/api/tests/test_component.py 
+++ b/python/api/tests/test_component.py @@ -80,7 +80,7 @@ def test_change_maintenance_mode_on_no_action_success(self): self.assertEqual(response.data["maintenance_mode"], "ON") self.assertEqual(self.component.maintenance_mode, MaintenanceMode.ON) - def test_change_maintenance_mode_on_no_service_issue_success(self): + def test_adcm_5822_change_maintenance_mode_on_does_not_affect_parent_issues(self): bundle = self.upload_and_load_bundle( path=Path( self.base_dir, @@ -95,6 +95,8 @@ def test_change_maintenance_mode_on_no_service_issue_success(self): ) cluster = Cluster.objects.get(pk=cluster_response.data["id"]) + self.assertFalse(cluster.concerns.exists()) + service_prototype = Prototype.objects.get(bundle=bundle, type="service") service_response: Response = self.client.post( path=reverse(viewname="v1:service", kwargs={"cluster_id": cluster.pk}), @@ -105,6 +107,7 @@ def test_change_maintenance_mode_on_no_service_issue_success(self): component_1 = ServiceComponent.objects.get(service=service, prototype__name="first_component") component_2 = ServiceComponent.objects.get(service=service, prototype__name="second_component") + self.assertTrue(cluster.concerns.exists()) self.assertTrue(service.concerns.exists()) self.assertTrue(component_2.concerns.exists()) self.assertFalse(component_1.concerns.exists()) @@ -120,7 +123,8 @@ def test_change_maintenance_mode_on_no_service_issue_success(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(response.data["maintenance_mode"], "ON") self.assertEqual(component_2.maintenance_mode, MaintenanceMode.ON) - self.assertFalse(service.concerns.exists()) + self.assertTrue(service.concerns.exists()) + self.assertTrue(cluster.concerns.exists()) def test_change_maintenance_mode_on_with_action_success(self): HostComponent.objects.create( diff --git a/python/api/tests/test_host.py b/python/api/tests/test_host.py index 43fe5a9350..7cf722ea73 100644 --- a/python/api/tests/test_host.py +++ b/python/api/tests/test_host.py @@ 
-294,7 +294,8 @@ def test_cluster_clear_issue_success(self): data={"maintenance_mode": "ON"}, ) - self.assertFalse(cluster.concerns.exists()) + # ADCM-5822 mm does not affect concerns + self.assertTrue(cluster.concerns.exists()) def test_mm_constraint_by_no_cluster_fail(self): self.host.cluster = None diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 621e4a365c..97522e56c2 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -712,7 +712,7 @@ def test_concerns_swap_on_mapping_changes(self) -> None: self.check_concerns_of_control_objects() - def test_concerns_distribution_mm(self) -> None: + def test_mm_does_not_affect_concerns_distribution(self) -> None: # prepare second_provider = self.add_provider(bundle=self.provider.prototype.bundle, name="No Concerns HP") host_no_concerns = self.add_host(provider=second_provider, fqdn="no-concerns-host", cluster=self.cluster) @@ -751,26 +751,7 @@ def test_concerns_distribution_mm(self) -> None: single_con = single_c.get_own_issue(ConcernCause.CONFIG) main_and_single_cons = (main_service_own_con, single_con) - # test - with self.subTest("Unmapped Distribution Turn Service ON"): - self.change_mm_via_api(MM.ON, main_s) - - self.check_concerns(self.cluster, concerns=(*cluster_own_cons, no_components_service_own_con)) - self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con)) - self.check_concerns(single_c, concerns=(*cluster_own_cons, single_con)) - self.check_concerns(free_c, concerns=cluster_own_cons) - self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) - - self.check_concerns(host_1, concerns=(host_1_config_con, provider_config_con)) - self.check_concerns(host_2, concerns=(provider_config_con,)) - self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) - self.check_concerns(host_no_concerns, concerns=(second_provider_con,)) - - 
self.check_concerns_of_control_objects() - - with self.subTest("Unmapped Distribution Turn Service OFF"): - self.change_mm_via_api(MM.OFF, main_s) - + def check_concerns(): self.check_concerns( self.cluster, concerns=(*cluster_own_cons, *main_and_single_cons, no_components_service_own_con) ) @@ -786,27 +767,17 @@ def test_concerns_distribution_mm(self) -> None: self.check_concerns_of_control_objects() - self.set_hostcomponent( - cluster=self.cluster, - entries=((host_1, single_c), (host_1, free_c), (host_2, free_c), (host_no_concerns, free_c)), - ) - cluster_own_cons = tuple( - ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) - ) - - with self.subTest("Mapped Turn Component ON"): - self.change_mm_via_api(MM.ON, single_c) - + def check_concerns_after_mapping(): self.check_concerns( self.cluster, concerns=( *cluster_own_cons, - main_service_own_con, + *main_and_single_cons, no_components_service_own_con, *all_mapped_hosts_cons, ), ) - self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, *all_mapped_hosts_cons)) + self.check_concerns(main_s, concerns=(*cluster_own_cons, *main_and_single_cons, *all_mapped_hosts_cons)) self.check_concerns( single_c, concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con, host_1_config_con), @@ -815,7 +786,7 @@ def test_concerns_distribution_mm(self) -> None: self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) self.check_concerns( - host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) + host_1, concerns=(*cluster_own_cons, *main_and_single_cons, host_1_config_con, provider_config_con) ) self.check_concerns( host_2, @@ -828,119 +799,45 @@ def test_concerns_distribution_mm(self) -> None: self.check_concerns_of_control_objects() - with self.subTest("Mapped Turn Host ON"): - self.change_mm_via_api(MM.ON, host_1) - - self.check_concerns( - self.cluster, - 
concerns=(*cluster_own_cons, main_service_own_con, no_components_service_own_con, *provider_cons), - ) - self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, *provider_cons)) - self.check_concerns( - single_c, - concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con), - ) - self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, *provider_cons)) - self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + # test + with self.subTest("Initial state"): + check_concerns() - self.check_concerns( - host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) - ) - self.check_concerns( - host_2, - concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), - ) - self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) - self.check_concerns( - host_no_concerns, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con) - ) + with self.subTest("Unmapped Distribution Turn Service ON"): + self.change_mm_via_api(MM.ON, main_s) + check_concerns() - self.check_concerns_of_control_objects() + with self.subTest("Unmapped Distribution Turn Service OFF"): + self.change_mm_via_api(MM.OFF, main_s) + check_concerns() - with self.subTest("Mapped Turn Second Host ON"): - self.change_mm_via_api(MM.ON, host_2) + self.set_hostcomponent( + cluster=self.cluster, + entries=((host_1, single_c), (host_1, free_c), (host_2, free_c), (host_no_concerns, free_c)), + ) + cluster_own_cons = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) - self.check_concerns( - self.cluster, - concerns=(*cluster_own_cons, main_service_own_con, no_components_service_own_con, second_provider_con), - ) - self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) - self.check_concerns(single_c, concerns=(*cluster_own_cons, 
*main_and_single_cons)) - self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) - self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) + with self.subTest("Mapped Turn Component ON"): + self.change_mm_via_api(MM.ON, single_c) + check_concerns_after_mapping() - self.check_concerns( - host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) - ) - self.check_concerns( - host_2, - concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), - ) - self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) - self.check_concerns( - host_no_concerns, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con) - ) + with self.subTest("Mapped Turn Host ON"): + self.change_mm_via_api(MM.ON, host_1) + check_concerns_after_mapping() - self.check_concerns_of_control_objects() + with self.subTest("Mapped Turn Second Host ON"): + self.change_mm_via_api(MM.ON, host_2) + check_concerns_after_mapping() with self.subTest("Mapped Turn Service Without Components ON"): self.change_mm_via_api(MM.ON, no_components_s) - - self.check_concerns(self.cluster, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) - self.check_concerns(main_s, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) - self.check_concerns(single_c, concerns=(*cluster_own_cons, *main_and_single_cons)) - self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, second_provider_con)) - self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) - - self.check_concerns( - host_1, concerns=(*cluster_own_cons, main_service_own_con, host_1_config_con, provider_config_con) - ) - self.check_concerns( - host_2, - concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), - ) - self.check_concerns(unmapped_host, 
concerns=(provider_config_con, unmapped_host_con)) - self.check_concerns( - host_no_concerns, - concerns=(*cluster_own_cons, main_service_own_con, second_provider_con), - ) - - self.check_concerns_of_control_objects() + check_concerns_after_mapping() with self.subTest("Mapped Turn All OFF"): self.change_mm_via_api(MM.OFF, no_components_s, host_1, host_2, single_c) - - self.check_concerns( - self.cluster, - concerns=( - *cluster_own_cons, - *main_and_single_cons, - no_components_service_own_con, - *all_mapped_hosts_cons, - ), - ) - self.check_concerns(main_s, concerns=(*cluster_own_cons, *main_and_single_cons, *all_mapped_hosts_cons)) - self.check_concerns( - single_c, - concerns=(*cluster_own_cons, *main_and_single_cons, provider_config_con, host_1_config_con), - ) - self.check_concerns(free_c, concerns=(*cluster_own_cons, main_service_own_con, *all_mapped_hosts_cons)) - self.check_concerns(no_components_s, concerns=(*cluster_own_cons, no_components_service_own_con)) - - self.check_concerns( - host_1, concerns=(*cluster_own_cons, *main_and_single_cons, host_1_config_con, provider_config_con) - ) - self.check_concerns( - host_2, - concerns=(*cluster_own_cons, main_service_own_con, provider_config_con), - ) - self.check_concerns(unmapped_host, concerns=(provider_config_con, unmapped_host_con)) - self.check_concerns( - host_no_concerns, - concerns=(*cluster_own_cons, main_service_own_con, second_provider_con), - ) - - self.check_concerns_of_control_objects() + check_concerns_after_mapping() def test_concern_removal_with_flag_autogeneration_on_config_change(self) -> None: # prepare @@ -959,113 +856,133 @@ def test_concern_removal_with_flag_autogeneration_on_config_change(self) -> None cluster=self.cluster, entries=((host_1, single_c), (host_1, free_c), (host_2, free_c), (another_host, free_c)), ) - self.change_mm_via_api(MM.ON, host_2, single_c) + self.change_mm_via_api(MM.ON, host_2, single_c) # ADCM-5882: MM should not affect concerns # find own concerns - expected = 
{} - - no_components_s_own_con = no_components_s.get_own_issue(ConcernCause.IMPORT) - expected["main_s_own_con"] = main_s.get_own_issue(ConcernCause.CONFIG) - expected["cluster_own_cons"] = tuple( + no_components_s_own_concern = no_components_s.get_own_issue(ConcernCause.IMPORT) + main_s_own_concern = main_s.get_own_issue(ConcernCause.CONFIG) + cluster_own_concerns = tuple( ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) ) - expected["single_c_con"] = single_c.get_own_issue(ConcernCause.CONFIG) - - def check_concerns(): - mapped_hosts_concerns = (*expected["host_1_concerns"], *expected["another_host_concerns"]) - self.check_concerns( - self.cluster, - concerns=( - *expected["cluster_own_cons"], - expected["main_s_own_con"], - no_components_s_own_con, - *mapped_hosts_concerns, - ), + single_c_concern = single_c.get_own_issue(ConcernCause.CONFIG) + host_1_concern = host_1.get_own_issue(ConcernCause.CONFIG) + host_2_concern = host_2.get_own_issue(ConcernCause.CONFIG) + another_host_concern = another_host.get_own_issue(ConcernCause.CONFIG) + provider_concern = self.provider.get_own_issue(ConcernCause.CONFIG) + another_provider_concern = another_provider.get_own_issue(ConcernCause.CONFIG) + + expected_concerns = {} + + def _update_expected_concerns(): + mapped_hosts_concerns = ( + host_1_concern, + host_2_concern, + provider_concern, + another_host_concern, + another_provider_concern, ) - self.check_concerns(no_components_s, concerns=(*expected["cluster_own_cons"], no_components_s_own_con)) - self.check_concerns( - main_s, concerns=(*expected["cluster_own_cons"], expected["main_s_own_con"], *mapped_hosts_concerns) + + expected_concerns["cluster"] = ( + *cluster_own_concerns, + main_s_own_concern, + no_components_s_own_concern, + single_c_concern, + *mapped_hosts_concerns, ) - self.check_concerns( - free_c, concerns=(*expected["cluster_own_cons"], expected["main_s_own_con"], *mapped_hosts_concerns) + 
expected_concerns["no_components_s"] = (*cluster_own_concerns, no_components_s_own_concern) + expected_concerns["main_s"] = ( + *cluster_own_concerns, + main_s_own_concern, + single_c_concern, + *mapped_hosts_concerns, ) - self.check_concerns( - single_c, - concerns=( - *expected["cluster_own_cons"], - expected["main_s_own_con"], - expected["single_c_con"], - *expected["host_1_concerns"], - ), + expected_concerns["free_c"] = (*cluster_own_concerns, main_s_own_concern, *mapped_hosts_concerns) + expected_concerns["single_c"] = ( + *cluster_own_concerns, + main_s_own_concern, + single_c_concern, + host_1_concern, + provider_concern, ) - - self.check_concerns( - host_1, - concerns=(*expected["cluster_own_cons"], expected["main_s_own_con"], *expected["host_1_concerns"]), + expected_concerns["host_1"] = ( + *cluster_own_concerns, + main_s_own_concern, + single_c_concern, + host_1_concern, + provider_concern, ) - self.check_concerns( - host_2, - concerns=( - *expected["cluster_own_cons"], - expected["main_s_own_con"], - *self.get_config_issues_of(host_2, self.provider), - ), + expected_concerns["host_2"] = (*cluster_own_concerns, main_s_own_concern, host_2_concern, provider_concern) + expected_concerns["another_host"] = ( + *cluster_own_concerns, + main_s_own_concern, + another_host_concern, + another_provider_concern, ) - self.check_concerns( - another_host, - concerns=( - *expected["cluster_own_cons"], - expected["main_s_own_con"], - *expected["another_host_concerns"], - ), - ) - self.check_concerns(unmapped_host, concerns=self.get_config_issues_of(unmapped_host, self.provider)) - self.check_concerns(self.provider, concerns=self.get_config_issues_of(self.provider)) - self.check_concerns(another_provider, concerns=self.get_config_flags_of(another_provider)) + expected_concerns["unmapped_host"] = self.get_config_issues_of(unmapped_host, self.provider) + expected_concerns["provider"] = (provider_concern,) + expected_concerns["another_provider"] = 
(another_provider_concern,) + + def check_concerns(): + self.check_concerns(self.cluster, concerns=expected_concerns["cluster"]) + self.check_concerns(no_components_s, concerns=expected_concerns["no_components_s"]) + self.check_concerns(main_s, concerns=expected_concerns["main_s"]) + self.check_concerns(free_c, concerns=expected_concerns["free_c"]) + self.check_concerns(single_c, concerns=expected_concerns["single_c"]) + + self.check_concerns(host_1, concerns=expected_concerns["host_1"]) + self.check_concerns(host_2, concerns=expected_concerns["host_2"]) + self.check_concerns(another_host, concerns=expected_concerns["another_host"]) + self.check_concerns(unmapped_host, concerns=expected_concerns["unmapped_host"]) + self.check_concerns(self.provider, concerns=expected_concerns["provider"]) + self.check_concerns(another_provider, concerns=expected_concerns["another_provider"]) self.check_concerns_of_control_objects() # test - self.change_config_via_api(another_provider) - - expected["host_1_concerns"] = self.get_config_issues_of(host_1, self.provider) - expected["another_host_concerns"] = ( - *self.get_config_issues_of(another_host), - *self.get_config_flags_of(another_provider), - ) + with self.subTest("Initial concerns"): + _update_expected_concerns() + check_concerns() with self.subTest("Change HostProvider Config"): - check_concerns() + self.change_config_via_api(another_provider) - self.change_config_via_api(host_1) + another_provider_concern = self.get_config_flags_of(another_provider)[0] + _update_expected_concerns() - expected["host_1_concerns"] = (*self.get_config_issues_of(self.provider), *self.get_config_flags_of(host_1)) - expected["another_host_concerns"] = ( - *self.get_config_issues_of(another_host), - *self.get_config_flags_of(another_provider), - ) + check_concerns() with self.subTest("Change Host Config"): - check_concerns() + self.change_config_via_api(host_1) - self.change_config_via_api(single_c) - expected["single_c_con"] = 
self.get_config_flags_of(single_c)[0] + host_1_concern = self.get_config_flags_of(host_1)[0] + _update_expected_concerns() - with self.subTest("Change Component in MM Config"): check_concerns() - self.change_config_via_api(self.cluster) - expected["cluster_own_cons"] = tuple( - ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) - ) + with self.subTest("Change Component in MM Config"): + self.change_config_via_api(single_c) + + single_c_concern = self.get_config_flags_of(single_c)[0] + _update_expected_concerns() - with self.subTest("Change Cluster Config"): check_concerns() - self.change_config_via_api(main_s) - expected["main_s_own_con"] = self.get_config_flags_of(main_s)[0] + with self.subTest("Change Cluster Config"): + self.change_config_via_api(self.cluster) + + cluster_own_concerns = tuple( + ConcernItem.objects.filter(owner_id=self.cluster.id, owner_type=Cluster.class_content_type) + ) + _update_expected_concerns() + + check_concerns() with self.subTest("Change Service Config"): + self.change_config_via_api(main_s) + + main_s_own_concern = self.get_config_flags_of(main_s)[0] + _update_expected_concerns() + check_concerns() def test_concerns_changes_on_import(self) -> None: diff --git a/python/cm/hierarchy.py b/python/cm/hierarchy.py index e21503025d..4ed2737f60 100644 --- a/python/cm/hierarchy.py +++ b/python/cm/hierarchy.py @@ -16,7 +16,6 @@ ClusterObject, Host, HostComponent, - MaintenanceMode, ServiceComponent, ) @@ -164,16 +163,13 @@ def _build_tree_up(self, node: Node) -> None: if node.type == "cluster": parent_values = [None] elif node.type == "service": - parent_values = [node.value.cluster] if node.value.maintenance_mode == MaintenanceMode.OFF else [] + parent_values = [node.value.cluster] elif node.type == "component": - parent_values = [node.value.service] if node.value.maintenance_mode == MaintenanceMode.OFF else [] + parent_values = [node.value.service] elif node.type == "host": parent_values = [ 
hc.component - for hc in HostComponent.objects.filter(host=node.value) - .exclude(host__maintenance_mode=MaintenanceMode.ON) - .select_related("component") - .order_by("id") + for hc in HostComponent.objects.filter(host=node.value).select_related("component").order_by("id") ] elif node.type == "provider": parent_values = Host.objects.filter(provider=node.value).order_by("id") diff --git a/python/cm/services/concern/distribution.py b/python/cm/services/concern/distribution.py index bb20d48a24..2906e780e4 100644 --- a/python/cm/services/concern/distribution.py +++ b/python/cm/services/concern/distribution.py @@ -10,18 +10,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import defaultdict, deque +from collections import defaultdict from copy import copy from itertools import chain from operator import itemgetter from typing import Iterable, TypeAlias -from core.cluster.operations import ( - calculate_maintenance_mode_for_cluster_objects, - calculate_maintenance_mode_for_component, - calculate_maintenance_mode_for_service, -) -from core.cluster.types import ClusterTopology, ObjectMaintenanceModeState +from core.cluster.types import ClusterTopology from core.types import ( ADCMCoreType, ClusterID, @@ -47,7 +42,6 @@ HostProvider, ServiceComponent, ) -from cm.services.cluster import retrieve_clusters_objects_maintenance_mode # PUBLIC redistribute_issues_and_flags @@ -86,12 +80,7 @@ def redistribute_issues_and_flags(topology: ClusterTopology) -> None: topology=topology, objects_concerns=objects_concerns ) - # Step #3. Remove concerns from objects in MM - concern_links = _drop_concerns_from_objects_in_mm( - topology=topology, concern_links=concern_links, provider_host_map=provider_host_ids_mapping - ) - - # Step #4. Link objects to concerns + # Step #3. 
Link objects to concerns _relink_concerns_to_objects_in_db( concern_links=concern_links, topology_objects=topology_objects, @@ -162,63 +151,6 @@ def _calculate_concerns_distribution_for_topology( return concern_links -def _drop_concerns_from_objects_in_mm( - topology: ClusterTopology, concern_links: AffectedObjectConcernMap, provider_host_map: ProviderHostMap -) -> AffectedObjectConcernMap: - mm_of_objects = calculate_maintenance_mode_for_cluster_objects( - topology=topology, - own_maintenance_mode=retrieve_clusters_objects_maintenance_mode(cluster_ids=(topology.cluster_id,)), - ) - - hosts_in_mm = set(_keep_objects_in_mm(mm_of_objects.hosts)) - - objects_in_mm_own_concerns: OwnObjectConcernMap = _get_own_concerns_of_objects( - with_types=(ConcernType.ISSUE, ConcernType.FLAG), - hosts=hosts_in_mm, - services=_keep_objects_in_mm(mm_of_objects.services), - components=_keep_objects_in_mm(mm_of_objects.components), - ) - - if not objects_in_mm_own_concerns: - return concern_links - - unmapped_hosts = topology.unmapped_hosts - hostprovider_concerns_to_unlink = set() - hostproviders_to_exclude = deque() - for hostprovider_id, hosts in provider_host_map.items(): - # If all mapped hosts are in MM, then HP concerns should be removed from all objects that aren't hosts. - # If at least one mapped host is not in MM, concerns should be passed in a regular way. 
- mapped_hosts = hosts - unmapped_hosts - if mapped_hosts and mapped_hosts.issubset(hosts_in_mm): - hostproviders_to_exclude.append(hostprovider_id) - - if hostproviders_to_exclude: - hostprovider_concerns_to_unlink |= set( - chain.from_iterable( - _get_own_concerns_of_objects( - with_types=(ConcernType.ISSUE, ConcernType.FLAG), hostproviders=hostproviders_to_exclude - ) - .get(ADCMCoreType.HOSTPROVIDER, {}) - .values() - ) - ) - - own_concerns_to_keep: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) - concerns_to_unlink: set[int] = copy(hostprovider_concerns_to_unlink) - - for core_type, concern_dict in objects_in_mm_own_concerns.items(): - hostprovider_concerns_to_keep = set() if core_type != ADCMCoreType.HOST else hostprovider_concerns_to_unlink - for object_id, concerns in concern_dict.items(): - own_concerns_to_keep[core_type][object_id] = concerns | hostprovider_concerns_to_keep - concerns_to_unlink |= concerns - - for core_type, concern_dict in concern_links.items(): - for object_id, concerns in concern_dict.items(): - concern_dict[object_id] = concerns - (concerns_to_unlink - own_concerns_to_keep[core_type][object_id]) - - return concern_links - - def _relink_concerns_to_objects_in_db( concern_links: dict[ADCMCoreType, dict[ObjectID, set[int]]], topology_objects: TopologyObjectMap, @@ -277,7 +209,7 @@ def distribute_concern_on_related_objects(owner: CoreObjectDescriptor, concern_i def _find_concern_distribution_targets(owner: CoreObjectDescriptor) -> ConcernRelatedObjects: """ - Find objects that should be affected by appeared concern on given objects considering HC and MM. + Find objects that should be affected by appeared concern on given objects considering HC. 
""" targets: ConcernRelatedObjects = defaultdict(set) @@ -296,63 +228,33 @@ def _find_concern_distribution_targets(owner: CoreObjectDescriptor) -> ConcernRe ) case ADCMCoreType.SERVICE: - hosts_info = HostComponent.objects.values_list("host_id", "host__maintenance_mode").filter( - service_id=owner.id - ) - components_info = ServiceComponent.objects.values_list("id", "_maintenance_mode").filter( - service_id=owner.id + targets[ADCMCoreType.HOST] |= set( + HostComponent.objects.values_list("host_id", flat=True).filter(service_id=owner.id) ) - - raw_own_mm, cluster_id = ClusterObject.objects.values_list("_maintenance_mode", "cluster_id").get( - id=owner.id + targets[ADCMCoreType.COMPONENT] |= set( + ServiceComponent.objects.values_list("id", flat=True).filter(service_id=owner.id) ) - - own_mm = calculate_maintenance_mode_for_service( - own_mm=ObjectMaintenanceModeState(raw_own_mm), - service_components_own_mm=( - ObjectMaintenanceModeState(component_mm) for _, component_mm in components_info - ), - service_hosts_mm=(ObjectMaintenanceModeState(host_mm) for _, host_mm in hosts_info), + targets[ADCMCoreType.CLUSTER].add( + ClusterObject.objects.values_list("cluster_id", flat=True).get(id=owner.id) ) - if own_mm == ObjectMaintenanceModeState.OFF: - targets[ADCMCoreType.CLUSTER].add(cluster_id) - targets[ADCMCoreType.COMPONENT].update(map(itemgetter(0), components_info)) - targets[ADCMCoreType.HOST].update(map(itemgetter(0), hosts_info)) - case ADCMCoreType.COMPONENT: - raw_own_mm, cluster_id, service_id, service_raw_own_mm = ServiceComponent.objects.values_list( - "_maintenance_mode", "cluster_id", "service_id", "service___maintenance_mode" - ).get(id=owner.id) - - hosts_info = HostComponent.objects.values_list("host_id", "host__maintenance_mode").filter( - component_id=owner.id - ) + cluster_id, service_id = ServiceComponent.objects.values_list("cluster_id", "service_id").get(id=owner.id) - own_mm = calculate_maintenance_mode_for_component( - 
own_mm=ObjectMaintenanceModeState(raw_own_mm), - service_mm=ObjectMaintenanceModeState(service_raw_own_mm), - component_hosts_mm=(ObjectMaintenanceModeState(host_mm) for _, host_mm in hosts_info), + targets[ADCMCoreType.CLUSTER].add(cluster_id) + targets[ADCMCoreType.SERVICE].add(service_id) + targets[ADCMCoreType.HOST] |= set( + HostComponent.objects.values_list("host_id", flat=True).filter(component_id=owner.id) ) - if own_mm == ObjectMaintenanceModeState.OFF: - targets[ADCMCoreType.CLUSTER].add(cluster_id) - targets[ADCMCoreType.SERVICE].add(service_id) - targets[ADCMCoreType.HOST].update(map(itemgetter(0), hosts_info)) - case ADCMCoreType.HOST: - own_mm = ObjectMaintenanceModeState( - Host.objects.values_list("maintenance_mode", flat=True).get(id=owner.id) + hc_records = tuple( + HostComponent.objects.values("cluster_id", "service_id", "component_id").filter(host_id=owner.id) ) - - if own_mm == ObjectMaintenanceModeState.OFF: - hc_records = tuple( - HostComponent.objects.values("cluster_id", "service_id", "component_id").filter(host_id=owner.id) - ) - if hc_records: - targets[ADCMCoreType.CLUSTER].add(hc_records[0]["cluster_id"]) - targets[ADCMCoreType.SERVICE].update(map(itemgetter("service_id"), hc_records)) - targets[ADCMCoreType.COMPONENT].update(map(itemgetter("component_id"), hc_records)) + if hc_records: + targets[ADCMCoreType.CLUSTER].add(hc_records[0]["cluster_id"]) + targets[ADCMCoreType.SERVICE].update(map(itemgetter("service_id"), hc_records)) + targets[ADCMCoreType.COMPONENT].update(map(itemgetter("component_id"), hc_records)) case ADCMCoreType.HOSTPROVIDER: targets[ADCMCoreType.HOST] |= set(Host.objects.values_list("id", flat=True).filter(provider_id=owner.id)) @@ -389,10 +291,6 @@ def _add_concern_links_to_objects_in_db(targets: ConcernRelatedObjects, concern_ # PROTECTED generic-purpose methods -def _keep_objects_in_mm(entries: dict[int, ObjectMaintenanceModeState]) -> Iterable[int]: - return (id_ for id_, mm in entries.items() if mm != 
ObjectMaintenanceModeState.OFF) - - def _get_own_concerns_of_objects( with_types: Iterable[ConcernType], *, diff --git a/python/cm/services/maintenance_mode.py b/python/cm/services/maintenance_mode.py index a2c93be4fd..18e3a6cc42 100644 --- a/python/cm/services/maintenance_mode.py +++ b/python/cm/services/maintenance_mode.py @@ -18,17 +18,12 @@ from cm.models import ( Action, ClusterObject, - ConcernItem, - ConcernType, Host, HostComponent, MaintenanceMode, Prototype, ServiceComponent, ) -from cm.services.cluster import retrieve_clusters_topology -from cm.services.concern.distribution import redistribute_issues_and_flags -from cm.services.concern.flags import update_hierarchy from cm.services.job.action import ActionRunPayload, run_action from cm.services.status.notify import reset_objects_in_mm from cm.status_api import send_object_update_event @@ -52,16 +47,6 @@ def _change_mm_via_action( return serializer -def _update_mm_hierarchy_issues(obj: Host | ClusterObject | ServiceComponent) -> None: - redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((obj.cluster_id,)))) - reset_objects_in_mm() - - -def _update_flags() -> None: - for flag in ConcernItem.objects.filter(type=ConcernType.FLAG): - update_hierarchy(concern=flag) - - def get_maintenance_mode_response( obj: Host | ClusterObject | ServiceComponent, serializer: Serializer, @@ -134,7 +119,7 @@ def get_maintenance_mode_response( serializer.validated_data["maintenance_mode"] = MaintenanceMode.ON serializer.save() - _update_mm_hierarchy_issues(obj=obj) + reset_objects_in_mm() send_object_update_event(object_=obj, changes={"maintenanceMode": obj.maintenance_mode}) return Response() @@ -162,7 +147,7 @@ def get_maintenance_mode_response( serializer.validated_data["maintenance_mode"] = MaintenanceMode.OFF serializer.save() - _update_mm_hierarchy_issues(obj=obj) + reset_objects_in_mm() send_object_update_event(object_=obj, changes={"maintenanceMode": obj.maintenance_mode}) return Response() From 
59fc34a3f875852b97529788ac013e998690c064 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 30 Jul 2024 12:28:45 +0000 Subject: [PATCH 37/98] ADCM-5796 Rework `check_hc` function --- python/cm/api.py | 2 +- python/cm/services/cluster.py | 5 +- python/cm/services/concern/cases.py | 51 ++++---- python/cm/services/concern/checks.py | 189 ++++++++++++++++++++++++++- 4 files changed, 216 insertions(+), 31 deletions(-) diff --git a/python/cm/api.py b/python/cm/api.py index 9873bdbe34..18586e8423 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -173,7 +173,7 @@ def add_host(prototype: Prototype, provider: HostProvider, fqdn: str, descriptio if concerns := recalculate_own_concerns_on_add_hosts(host): # TODO: redistribute only new issues. See ADCM-5798 distribute_concern_on_related_objects( owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), - concern_id=concerns[ADCMCoreType.HOST][host.id], + concern_id=next(iter(concerns[ADCMCoreType.HOST][host.id])), ) if concern := provider.get_own_issue(ConcernCause.CONFIG): host.concerns.add(concern) diff --git a/python/cm/services/cluster.py b/python/cm/services/cluster.py index bc505be6cd..05d561dd61 100644 --- a/python/cm/services/cluster.py +++ b/python/cm/services/cluster.py @@ -101,14 +101,15 @@ def perform_host_to_cluster_map( # this import should be resolved later: # concerns management should be passed in here the same way as `status_service`, # because it's a dependency that shouldn't be directly set - from cm.issue import check_hc, create_issue + from cm.issue import create_issue + from cm.services.concern.checks import cluster_mapping_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects with atomic(): add_hosts_to_cluster(cluster_id=cluster_id, hosts=hosts, db=ClusterDB) cluster = Cluster.objects.get(id=cluster_id) - if check_hc(cluster=cluster): + if not cluster_mapping_has_issue(cluster=cluster): delete_issue( owner=CoreObjectDescriptor(id=cluster.id, 
type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT ) diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index bdc32da063..5bf8e10d71 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -17,10 +17,11 @@ from core.types import ADCMCoreType, CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType -from cm.issue import check_hc, check_required_services, create_issue +from cm.issue import create_issue from cm.models import Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, ServiceComponent from cm.services.concern import delete_issue from cm.services.concern.checks import ( + cluster_mapping_has_issue, object_configuration_has_issue, object_has_required_services_issue, object_imports_has_issue, @@ -33,14 +34,14 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce new_concerns: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) cluster_checks = ( - (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), - (ConcernCause.HOSTCOMPONENT, check_hc), - (ConcernCause.SERVICE, lambda obj: not object_has_required_services_issue(obj)), + (ConcernCause.CONFIG, object_configuration_has_issue), + (ConcernCause.IMPORT, object_imports_has_issue), + (ConcernCause.HOSTCOMPONENT, cluster_mapping_has_issue), + (ConcernCause.SERVICE, object_has_required_services_issue), ) - for cause, check in cluster_checks: - if not check(cluster): + for cause, has_issue in cluster_checks: + if has_issue(cluster): issue = create_issue(obj=cluster, issue_cause=cause) new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) @@ -53,18 +54,18 @@ def recalculate_own_concerns_on_add_services( new_concerns: OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) # create new concerns - if not check_hc(cluster=cluster) and 
cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT) is None: + if cluster_mapping_has_issue(cluster=cluster) and cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT) is None: issue = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) service_checks = ( - (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), - (ConcernCause.REQUIREMENT, lambda obj: not service_requirements_has_issue(obj)), + (ConcernCause.CONFIG, object_configuration_has_issue), + (ConcernCause.IMPORT, object_imports_has_issue), + (ConcernCause.REQUIREMENT, service_requirements_has_issue), ) for service in services: - for concern_cause, func in service_checks: - if not func(service): + for concern_cause, has_issue in service_checks: + if has_issue(service): issue = create_issue(obj=service, issue_cause=concern_cause) new_concerns[ADCMCoreType.SERVICE][service.pk].add(issue.pk) @@ -89,17 +90,17 @@ def recalculate_own_concerns_on_add_services( def recalculate_own_concerns_on_add_hosts(host: Host) -> OwnObjectConcernMap: if object_configuration_has_issue(host): issue = create_issue(obj=host, issue_cause=ConcernCause.CONFIG) - return {ADCMCoreType.HOST: {host.id: issue.id}} + return {ADCMCoreType.HOST: {host.id: {issue.id}}} return {} def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: cluster_checks = ( - (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), - (ConcernCause.HOSTCOMPONENT, check_hc), - (ConcernCause.SERVICE, check_required_services), + (ConcernCause.CONFIG, object_configuration_has_issue), + (ConcernCause.IMPORT, object_imports_has_issue), + (ConcernCause.HOSTCOMPONENT, cluster_mapping_has_issue), + (ConcernCause.SERVICE, object_has_required_services_issue), ) existing_cluster_concern_causes = set( @@ 
-111,17 +112,17 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: ) ) - for cause, check in cluster_checks: + for cause, has_issue in cluster_checks: if cause in existing_cluster_concern_causes: continue - if not check(cluster): + if has_issue(cluster): create_issue(obj=cluster, issue_cause=cause) service_checks = ( - (ConcernCause.CONFIG, lambda obj: not object_configuration_has_issue(obj)), - (ConcernCause.IMPORT, lambda obj: not object_imports_has_issue(obj)), - (ConcernCause.REQUIREMENT, lambda obj: not service_requirements_has_issue(obj)), + (ConcernCause.CONFIG, object_configuration_has_issue), + (ConcernCause.IMPORT, object_imports_has_issue), + (ConcernCause.REQUIREMENT, service_requirements_has_issue), ) services = tuple(ClusterObject.objects.select_related("prototype").filter(cluster=cluster)) @@ -134,11 +135,11 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: ) ) for service in services: - for concern_cause, func in service_checks: + for concern_cause, has_issue in service_checks: if (service.id, concern_cause) in existing_service_concern_causes: continue - if not func(service): + if has_issue(service): create_issue(obj=service, issue_cause=concern_cause) components_with_config_concerns = set( diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 856846e11c..6cbcc6bda8 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -11,11 +11,14 @@ # limitations under the License. 
from collections import deque +from functools import partial from operator import attrgetter -from typing import Iterable, Literal, NamedTuple, TypeAlias +from typing import Callable, Iterable, Literal, NamedTuple, TypeAlias -from core.types import ClusterID, ConfigID, ObjectID +from core.cluster.types import ServiceTopology +from core.types import ClusterID, ComponentID, ConfigID, HostID, ObjectID, PrototypeID, ServiceID from django.db.models import Q +from typing_extensions import Self from cm.models import ( Cluster, @@ -24,15 +27,19 @@ Host, HostProvider, ObjectConfig, + ObjectType, Prototype, PrototypeImport, ServiceComponent, ) +from cm.services.cluster import retrieve_clusters_topology from cm.services.config import retrieve_config_attr_pairs from cm.services.config.spec import FlatSpec, retrieve_flat_spec_for_objects ObjectWithConfig: TypeAlias = Cluster | ClusterObject | ServiceComponent | HostProvider | Host HasIssue: TypeAlias = bool +RequiresEntry: TypeAlias = dict[Literal["service", "component"], str] +ConstraintDBFormat: TypeAlias = tuple[str] | tuple[int | str, int | str] class MissingRequirement(NamedTuple): @@ -40,6 +47,64 @@ class MissingRequirement(NamedTuple): name: str +class Constraint(NamedTuple): + internal: ConstraintDBFormat + checks: tuple[Callable[[int, int], bool], ...] 
+ + @classmethod + def from_db_repr(cls, constraint: ConstraintDBFormat) -> Self: + match constraint: + case [0, "+"]: + # no checks actually required, it's the "default" + checks = () + case ["+"]: + checks = (check_on_all,) + case ["odd"]: + checks = (check_is_odd,) + case [int(exact)]: + checks = (partial(check_exact, argument=exact),) + case [int(min_), "odd"]: + checks = (partial(check_equal_or_greater, argument=min_), check_is_odd) + case [int(min_), "+"]: + checks = (partial(check_equal_or_greater, argument=min_),) + case [int(min_), int(max_)]: + checks = (partial(check_equal_or_greater, argument=min_), partial(check_equal_or_less, argument=max_)) + case _: + checks = () + + return Constraint(internal=constraint, checks=checks) + + def is_met_for(self, mapped_hosts: int, hosts_in_cluster: int) -> bool: + return all(check(mapped_hosts, hosts_in_cluster) for check in self.checks) + + +class ServiceExternalRequirement(NamedTuple): + name: str + + +class ComponentExternalRequirement(NamedTuple): + name: str + service_name: str + + +class ComponentMappingRequirements(NamedTuple): + constraint: Constraint + requires: tuple[ServiceExternalRequirement | ComponentExternalRequirement, ...] 
+ bound_to: ComponentExternalRequirement | None + + @property + def is_constraint_check_required(self) -> bool: + return len(self.constraint.checks) > 0 + + @property + def is_requires_check_required(self) -> bool: + return len(self.requires) > 0 + + @property + def is_bound_to_check_required(self) -> bool: + return self.bound_to is not None + + def object_configuration_has_issue(target: ObjectWithConfig) -> HasIssue: config_spec = next(iter(retrieve_flat_spec_for_objects(prototypes=(target.prototype_id,)).values()), None) if not config_spec: @@ -122,7 +187,7 @@ def service_requirements_has_issue(service: ClusterObject) -> HasIssue: def find_unsatisfied_requirements( - cluster_id: ClusterID, requires: list[dict[Literal["service", "component"], str]] + cluster_id: ClusterID, requires: list[RequiresEntry] ) -> tuple[MissingRequirement, ...]: if not requires: return () @@ -155,3 +220,121 @@ def find_unsatisfied_requirements( missing_requirements.append(MissingRequirement(type="component", name=missing_component_name)) return tuple(missing_requirements) + + +def cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: + """ + Checks: + - requires (components only) + - constraint + - bound_to + """ + + # extract requirements + + bundle_id = cluster.prototype.bundle_id + + requirements_from_components: dict[PrototypeID, ComponentMappingRequirements] = {} + + for prototype_id, constraint, requires, bound_to in Prototype.objects.values_list( + "id", "constraint", "requires", "bound_to" + ).filter(bundle_id=bundle_id, type=ObjectType.COMPONENT): + prepared_requires = deque() + for requirement in requires: + service_name = requirement["service"] + if component_name := requirement.get("component"): + prepared_requires.append(ComponentExternalRequirement(name=component_name, service_name=service_name)) + else: + prepared_requires.append(ServiceExternalRequirement(name=service_name)) + + requirements_from_components[prototype_id] = ComponentMappingRequirements( + 
constraint=Constraint.from_db_repr(constraint), + requires=tuple(prepared_requires), + bound_to=ComponentExternalRequirement(name=bound_to["component"], service_name=bound_to["service"]) + if bound_to + else None, + ) + + # prepare data for check + + topology = next(retrieve_clusters_topology((cluster.id,))) + + component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID]] = {} + existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID] = { + ServiceExternalRequirement(name=service_name): service_id + for service_id, service_name in ClusterObject.objects.values_list("id", "prototype__name").filter( + cluster=cluster + ) + } + + for component_id, prototype_id, service_id, component_name, service_name in ServiceComponent.objects.values_list( + "id", "prototype_id", "service_id", "prototype__name", "service__prototype__name" + ).filter(id__in=topology.component_ids): + component_prototype_map[component_id] = (prototype_id, service_id) + existing_objects_map[ + ComponentExternalRequirement(name=component_name, service_name=service_name) + ] = component_id + + hosts_amount = len(topology.hosts) + + existing_objects = set(existing_objects_map.keys()) + + # run checks + + for component_id, (prototype_id, service_id) in component_prototype_map.items(): + requirements = requirements_from_components[prototype_id] + + if requirements.is_requires_check_required and not existing_objects.issuperset(requirements.requires): + return True + + if requirements.is_constraint_check_required and not requirements.constraint.is_met_for( + mapped_hosts=len(topology.services[service_id].components[component_id].hosts), + hosts_in_cluster=hosts_amount, + ): + return True + + if requirements.is_bound_to_check_required: + bound_component_id = existing_objects_map.get(requirements.bound_to) + if not bound_component_id: + return True + + service_id_of_bound_component = existing_objects_map.get( + 
ServiceExternalRequirement(name=requirements.bound_to.service_name) + ) + if not service_id_of_bound_component: + return True + + bound_service_topology: ServiceTopology | None = topology.services.get(service_id_of_bound_component) + if not bound_service_topology: + return True + + bound_component_hosts: set[HostID] = set(bound_service_topology.components[bound_component_id].hosts) + current_component_hosts: set[HostID] = set(topology.services[service_id].components[component_id].hosts) + + if bound_component_hosts != current_component_hosts: + return True + + return False + + +# constraint check functions + + +def check_equal_or_less(mapped_hosts: int, _: int, argument: int): + return mapped_hosts <= argument + + +def check_equal_or_greater(mapped_hosts: int, _: int, argument: int): + return mapped_hosts >= argument + + +def check_exact(mapped_hosts: int, _: int, argument: int): + return mapped_hosts == argument + + +def check_is_odd(mapped_hosts: int, _: int): + return mapped_hosts % 2 == 1 + + +def check_on_all(mapped_hosts: int, hosts_in_cluster: int): + return mapped_hosts > 0 and mapped_hosts == hosts_in_cluster From 163cd40c0e01393014d18ca9a729d6c55c50e1e9 Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Thu, 1 Aug 2024 11:36:23 +0500 Subject: [PATCH 38/98] ADCM-5680 Post-merge fixes + API documentation --- python/api_v2/generic/action/views.py | 1 - python/api_v2/generic/action_host_group/api_schema.py | 8 ++++++++ python/api_v2/generic/action_host_group/views.py | 2 +- python/api_v2/generic/group_config/api_schema.py | 7 +++++++ python/api_v2/generic/group_config/views.py | 3 +-- 5 files changed, 17 insertions(+), 4 deletions(-) diff --git a/python/api_v2/generic/action/views.py b/python/api_v2/generic/action/views.py index 8ee2cc728a..14f9401727 100644 --- a/python/api_v2/generic/action/views.py +++ b/python/api_v2/generic/action/views.py @@ -15,7 +15,6 @@ from adcm.mixins import GetParentObjectMixin from cm.errors import AdcmEx from cm.models import ( 
- ADCM, Action, ADCMEntity, ConcernType, diff --git a/python/api_v2/generic/action_host_group/api_schema.py b/python/api_v2/generic/action_host_group/api_schema.py index 4aefb4e673..2bde96abf8 100644 --- a/python/api_v2/generic/action_host_group/api_schema.py +++ b/python/api_v2/generic/action_host_group/api_schema.py @@ -64,6 +64,14 @@ def document_action_host_group_viewset(object_type: str): description=f"Return list of {object_type}'s hosts that can be added to action host group.", responses=responses(success=ShortHostSerializer(many=True), errors=HTTP_404_NOT_FOUND), ), + owner_host_candidate=extend_schema( + operation_id=f"get{capitalized_type}ActionHostGroupOwnCandidates", + summary=f"GET {object_type}'s host candidates for new Action Host Group", + description=f"Return list of {object_type}'s hosts that can be added to newly created action host group.", + responses=responses( + success=ShortHostSerializer(many=True), errors=(HTTP_403_FORBIDDEN, HTTP_404_NOT_FOUND) + ), + ), ) diff --git a/python/api_v2/generic/action_host_group/views.py b/python/api_v2/generic/action_host_group/views.py index f91dd37b1b..59ff74e5e5 100644 --- a/python/api_v2/generic/action_host_group/views.py +++ b/python/api_v2/generic/action_host_group/views.py @@ -39,7 +39,6 @@ from django.db.models import F, Model, QuerySet from django.db.transaction import atomic from django_filters.rest_framework import DjangoFilterBackend -from drf_spectacular.utils import extend_schema, extend_schema_view from guardian.shortcuts import get_objects_for_user from rbac.models import User from rest_framework.decorators import action @@ -62,6 +61,7 @@ set_group_and_host_names, set_group_and_host_names_from_response, ) +from api_v2.generic.action_host_group.filters import ActionHostGroupFilter from api_v2.generic.action_host_group.serializers import ( ActionHostGroupCreateResultSerializer, ActionHostGroupCreateSerializer, diff --git a/python/api_v2/generic/group_config/api_schema.py 
b/python/api_v2/generic/group_config/api_schema.py index d8509788ef..695f09ab6b 100644 --- a/python/api_v2/generic/group_config/api_schema.py +++ b/python/api_v2/generic/group_config/api_schema.py @@ -23,6 +23,7 @@ from api_v2.api_schema import ErrorSerializer, responses from api_v2.generic.group_config.serializers import GroupConfigSerializer, HostGroupConfigSerializer +from api_v2.host.serializers import HostShortSerializer def document_group_config_viewset(object_type: str): @@ -65,6 +66,12 @@ def document_group_config_viewset(object_type: str): description=f"Get a list of hosts available for adding to {object_type}'s config group.", responses={HTTP_200_OK: HostGroupConfigSerializer(many=True), HTTP_404_NOT_FOUND: ErrorSerializer}, ), + owner_host_candidates=extend_schema( + operation_id=f"get{capitalized_type}ConfigGroupHostOwnCandidates", + summary=f"GET {object_type}'s host candidates for new config group", + description=f"Get a list of hosts available for adding to {object_type}'s new config group.", + responses={HTTP_200_OK: HostShortSerializer(many=True), HTTP_404_NOT_FOUND: ErrorSerializer}, + ), ) diff --git a/python/api_v2/generic/group_config/views.py b/python/api_v2/generic/group_config/views.py index a90c1da7fa..6ba2065f1d 100644 --- a/python/api_v2/generic/group_config/views.py +++ b/python/api_v2/generic/group_config/views.py @@ -30,12 +30,11 @@ HTTP_204_NO_CONTENT, ) -from api_v2.api_schema import ErrorSerializer from api_v2.generic.config.utils import ConfigSchemaMixin from api_v2.generic.group_config.permissions import GroupConfigHostsPermissions, GroupConfigPermissions from api_v2.generic.group_config.serializers import GroupConfigSerializer, HostGroupConfigSerializer from api_v2.host.filters import HostMemberFilter -from api_v2.host.serializers import HostGroupConfigSerializer, HostShortSerializer +from api_v2.host.serializers import HostAddSerializer, HostShortSerializer from api_v2.views import ADCMGenericViewSet From 
891a780e305057519c44c947de5a40f831f686b8 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 2 Aug 2024 04:23:13 +0000 Subject: [PATCH 39/98] ADCM-5840 Fix concern recalculation errors from integration tests --- python/cm/api.py | 11 +++++++++++ python/cm/services/concern/checks.py | 24 +++++++++++++++++++----- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/python/cm/api.py b/python/cm/api.py index 18586e8423..aad2453929 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -20,6 +20,7 @@ from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.exceptions import MultipleObjectsReturned +from django.db.models import Q from django.db.transaction import atomic, on_commit from rbac.models import Policy, re_apply_object_policy from rbac.roles import apply_policy_for_new_config @@ -248,6 +249,16 @@ def delete_host(host: Host, cancel_tasks: bool = True) -> None: def delete_service(service: ClusterObject) -> None: service_pk = service.pk + + # need to remove concerns of components manually, because they aren't cleared otherwise + ConcernItem.objects.filter( + Q( + owner_id__in=ServiceComponent.objects.values_list("id", flat=True).filter(service=service), + owner_type=ServiceComponent.class_content_type, + ) + | Q(owner_id=service_pk, owner_type=service.content_type) + ).delete() + service.delete() cluster = service.cluster diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 6cbcc6bda8..652c907083 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -243,9 +243,8 @@ def cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: for requirement in requires: service_name = requirement["service"] if component_name := requirement.get("component"): + # "service" requirements aren't checked for mapping issue prepared_requires.append(ComponentExternalRequirement(name=component_name, service_name=service_name)) - else: - 
prepared_requires.append(ServiceExternalRequirement(name=service_name)) requirements_from_components[prototype_id] = ComponentMappingRequirements( constraint=Constraint.from_db_repr(constraint), @@ -284,15 +283,30 @@ def cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: for component_id, (prototype_id, service_id) in component_prototype_map.items(): requirements = requirements_from_components[prototype_id] - if requirements.is_requires_check_required and not existing_objects.issuperset(requirements.requires): - return True - if requirements.is_constraint_check_required and not requirements.constraint.is_met_for( mapped_hosts=len(topology.services[service_id].components[component_id].hosts), hosts_in_cluster=hosts_amount, ): return True + # only mapped components should be checked for requires and bound_to + if not topology.services[service_id].components[component_id].hosts: + continue + + if requirements.is_requires_check_required: + # all required components should be added + if not existing_objects.issuperset(requirements.requires): + return True + + for required_component in requirements.requires: + required_component_id = existing_objects_map[required_component] + required_service_id = existing_objects_map[ + ServiceExternalRequirement(name=required_component.service_name) + ] + # if required component is unmapped - that's mapping issue + if not topology.services[required_service_id].components[required_component_id].hosts: + return True + if requirements.is_bound_to_check_required: bound_component_id = existing_objects_map.get(requirements.bound_to) if not bound_component_id: From 0630363f54b35983a55d299a1ceaabb8da48f351 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 2 Aug 2024 11:11:49 +0000 Subject: [PATCH 40/98] ADCM-5837: Rework `create_issue` and `get_own_issue` --- python/api_v2/host/utils.py | 5 +- python/cm/api.py | 40 +++++----- python/cm/issue.py | 80 ++------------------ python/cm/services/cluster.py | 14 ++-- 
python/cm/services/concern/__init__.py | 4 +- python/cm/services/concern/_operaitons.py | 90 ++++++++++++++++++++++- python/cm/services/concern/cases.py | 31 +++++--- python/cm/tests/test_adcm_entity.py | 8 +- python/cm/tests/test_flag.py | 8 +- python/cm/upgrade.py | 18 +++-- 10 files changed, 168 insertions(+), 130 deletions(-) diff --git a/python/api_v2/host/utils.py b/python/api_v2/host/utils.py index 4622a07418..9f576fa436 100644 --- a/python/api_v2/host/utils.py +++ b/python/api_v2/host/utils.py @@ -21,8 +21,10 @@ ) from cm.logger import logger from cm.models import Cluster, Host, HostProvider, ObjectType, Prototype +from cm.services.concern import retrieve_issue from cm.services.maintenance_mode import get_maintenance_mode_response from cm.services.status.notify import reset_hc_map +from core.types import ADCMCoreType, CoreObjectDescriptor from rbac.models import re_apply_object_policy from rest_framework.request import Request from rest_framework.response import Response @@ -47,8 +49,9 @@ def _recheck_new_host_issues(host: Host): recheck_issues(obj=host) # only host itself is directly affected # propagate issues from provider only to this host + hostprovider = CoreObjectDescriptor(id=host.provider_id, type=ADCMCoreType.HOSTPROVIDER) for issue_cause in _prototype_issue_map.get(ObjectType.PROVIDER, []): - add_concern_to_object(object_=host, concern=host.provider.get_own_issue(cause=issue_cause)) + add_concern_to_object(object_=host, concern=retrieve_issue(owner=hostprovider, cause=issue_cause)) def process_config_issues_policies_hc(host: Host) -> None: diff --git a/python/cm/api.py b/python/cm/api.py index aad2453929..e1305fa051 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -41,7 +41,6 @@ check_component_constraint, check_hc_requires, check_service_requires, - create_issue, remove_concern_from_object, update_hierarchy_issues, update_issue_after_deleting, @@ -73,7 +72,7 @@ TaskLog, ) from cm.services.cluster import retrieve_clusters_topology -from 
cm.services.concern import delete_issue +from cm.services.concern import create_issue, delete_issue, retrieve_issue from cm.services.concern.cases import ( recalculate_own_concerns_on_add_clusters, recalculate_own_concerns_on_add_hosts, @@ -176,7 +175,9 @@ def add_host(prototype: Prototype, provider: HostProvider, fqdn: str, descriptio owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), concern_id=next(iter(concerns[ADCMCoreType.HOST][host.id])), ) - if concern := provider.get_own_issue(ConcernCause.CONFIG): + if concern := retrieve_issue( + owner=CoreObjectDescriptor(id=provider.id, type=ADCMCoreType.HOSTPROVIDER), cause=ConcernCause.CONFIG + ): host.concerns.add(concern) re_apply_object_policy(provider) @@ -199,11 +200,10 @@ def add_host_provider(prototype: Prototype, name: str, description: str = ""): provider.save() add_concern_to_object(object_=provider, concern=CTX.lock) + provider_cod = CoreObjectDescriptor(id=provider.id, type=ADCMCoreType.HOSTPROVIDER) if object_configuration_has_issue(provider): - concern = create_issue(obj=provider, issue_cause=ConcernCause.CONFIG) - distribute_concern_on_related_objects( - owner=CoreObjectDescriptor(id=provider.id, type=ADCMCoreType.HOSTPROVIDER), concern_id=concern.id - ) + concern = create_issue(owner=provider_cod, cause=ConcernCause.CONFIG) + distribute_concern_on_related_objects(owner=provider_cod, concern_id=concern.id) logger.info("host provider #%s %s is added", provider.pk, provider.name) @@ -262,15 +262,12 @@ def delete_service(service: ClusterObject) -> None: service.delete() cluster = service.cluster + cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) if check_hostcomponent_issue(cluster=cluster): - delete_issue( - owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT - ) - elif cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT) is None: - concern = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) - 
distribute_concern_on_related_objects( - owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), concern_id=concern.id - ) + delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) + elif retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) is None: + concern = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) + distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern.id) keep_objects = defaultdict(set) for task in TaskLog.objects.filter( @@ -628,12 +625,11 @@ def save_hc( # HC may break # We can't be sure this method is called after some sort of "check" + cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) if check_hostcomponent_issue(cluster=cluster): - delete_issue( - owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT - ) - elif not cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT): - create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) + delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) + elif retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) is None: + create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.id,)))) @@ -903,8 +899,8 @@ def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: list[ import_target = CoreObjectDescriptor(id=import_obj.id, type=orm_object_to_core_type(import_obj)) if not object_imports_has_issue(target=import_obj): delete_issue(owner=import_target, cause=ConcernCause.IMPORT) - elif not import_obj.get_own_issue(ConcernCause.IMPORT): - concern = create_issue(obj=import_obj, issue_cause=ConcernCause.IMPORT) + elif retrieve_issue(owner=import_target, cause=ConcernCause.IMPORT) is None: + concern = create_issue(owner=import_target, cause=ConcernCause.IMPORT) distribute_concern_on_related_objects(owner=import_target, 
concern_id=concern.id) return get_import(cluster=cluster, service=service) diff --git a/python/cm/issue.py b/python/cm/issue.py index 8707c9fe93..18d2398df8 100755 --- a/python/cm/issue.py +++ b/python/cm/issue.py @@ -14,12 +14,14 @@ from typing import Iterable from api_v2.concern.serializers import ConcernSerializer +from core.types import CoreObjectDescriptor from django.conf import settings from django.db.transaction import on_commit from djangorestframework_camel_case.util import camelize from cm.adcm_config.config import get_prototype_config from cm.adcm_config.utils import proto_ref +from cm.converters import orm_object_to_core_type from cm.data_containers import PrototypeData from cm.errors import AdcmEx from cm.hierarchy import Tree @@ -43,7 +45,8 @@ ServiceComponent, TaskLog, ) -from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, PlaceholderTypeDTO, build_concern_reason +from cm.services.concern import create_issue, retrieve_issue +from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, build_concern_reason from cm.status_api import send_concern_creation_event, send_concern_delete_event from cm.utils import obj_ref @@ -378,80 +381,13 @@ def check_component_constraint( ObjectType.PROVIDER: (ConcernCause.CONFIG,), ObjectType.HOST: (ConcernCause.CONFIG,), } -_issue_template_map = { - ConcernCause.CONFIG: ConcernMessage.CONFIG_ISSUE, - ConcernCause.IMPORT: ConcernMessage.REQUIRED_IMPORT_ISSUE, - ConcernCause.SERVICE: ConcernMessage.REQUIRED_SERVICE_ISSUE, - ConcernCause.HOSTCOMPONENT: ConcernMessage.HOST_COMPONENT_ISSUE, - ConcernCause.REQUIREMENT: ConcernMessage.UNSATISFIED_REQUIREMENT_ISSUE, -} - - -def _gen_issue_name(cause: ConcernCause) -> str: - return f"{ConcernType.ISSUE}_{cause.value}" - - -def _get_kwargs_for_issue(concern_name: ConcernMessage, source: ADCMEntity) -> tuple[dict, dict]: - kwargs_for_objects = {"source": source} - kwargs_for_objects_types = {"source": None, "target": None} - target = 
None - - if concern_name == ConcernMessage.REQUIRED_SERVICE_ISSUE: - kwargs_for_objects_types["source"] = "cluster_services" - kwargs_for_objects_types["target"] = "prototype" - bundle = source.prototype.bundle - # source is expected to be Cluster here - target = ( - Prototype.objects.filter( - bundle=bundle, - type="service", - required=True, - ) - .exclude(id__in=ClusterObject.objects.values_list("prototype_id", flat=True).filter(cluster=source)) - .first() - ) - - elif concern_name == ConcernMessage.UNSATISFIED_REQUIREMENT_ISSUE: - kwargs_for_objects_types["source"] = "cluster_services" - kwargs_for_objects_types["target"] = "prototype" - for require in source.prototype.requires: - try: - ClusterObject.objects.get(prototype__name=require["service"], cluster=source.cluster) - except ClusterObject.DoesNotExist: - target = Prototype.objects.get(name=require["service"], type="service", bundle=source.prototype.bundle) - break - - elif concern_name == ConcernMessage.CONFIG_ISSUE: - kwargs_for_objects_types["source"] = f"{source.prototype.type}_config" - - elif concern_name == ConcernMessage.HOST_COMPONENT_ISSUE: - kwargs_for_objects_types["source"] = "cluster_mapping" - - elif concern_name == ConcernMessage.REQUIRED_IMPORT_ISSUE: - kwargs_for_objects_types["source"] = "cluster_import" - - kwargs_for_objects["target"] = target - return kwargs_for_objects, kwargs_for_objects_types - - -def create_issue(obj: ADCMEntity, issue_cause: ConcernCause) -> ConcernItem: - concern_message = _issue_template_map[issue_cause] - kwargs_for_objects, kwargs_for_objects_types = _get_kwargs_for_issue(concern_name=concern_message, source=obj) - reason = build_concern_reason( - template=concern_message.template, - placeholder_objects=PlaceholderObjectsDTO(**kwargs_for_objects), - placeholder_types=PlaceholderTypeDTO(**kwargs_for_objects_types), - ) - type_: str = ConcernType.ISSUE.value - cause: str = issue_cause.value - return ConcernItem.objects.create( - type=type_, name=f"{cause or 
''}_{type_}".strip("_"), reason=reason, owner=obj, cause=cause - ) def add_issue_on_linked_objects(obj: ADCMEntity, issue_cause: ConcernCause) -> None: """Create newly discovered issue and add it to linked objects concerns""" - issue = obj.get_own_issue(cause=issue_cause) or create_issue(obj=obj, issue_cause=issue_cause) + object_cod = CoreObjectDescriptor(id=obj.id, type=orm_object_to_core_type(obj)) + object_own_issue = retrieve_issue(owner=object_cod, cause=issue_cause) + issue = object_own_issue or create_issue(owner=object_cod, cause=issue_cause) tree = Tree(obj) affected_nodes = tree.get_directly_affected(node=tree.built_from) @@ -462,7 +398,7 @@ def add_issue_on_linked_objects(obj: ADCMEntity, issue_cause: ConcernCause) -> N def remove_issue(obj: ADCMEntity, issue_cause: ConcernCause) -> None: """Remove outdated issue from other's concerns""" - issue = obj.get_own_issue(cause=issue_cause) + issue = retrieve_issue(owner=CoreObjectDescriptor(id=obj.id, type=orm_object_to_core_type(obj)), cause=issue_cause) if not issue: return issue.delete() diff --git a/python/cm/services/cluster.py b/python/cm/services/cluster.py index 05d561dd61..a192e6042b 100644 --- a/python/cm/services/cluster.py +++ b/python/cm/services/cluster.py @@ -26,7 +26,7 @@ from rbac.models import re_apply_object_policy from cm.models import Cluster, ClusterObject, ConcernCause, Host, HostComponent, ServiceComponent -from cm.services.concern import delete_issue +from cm.services.concern import create_issue, delete_issue class ClusterDB: @@ -101,23 +101,19 @@ def perform_host_to_cluster_map( # this import should be resolved later: # concerns management should be passed in here the same way as `status_service`, # because it's a dependency that shouldn't be directly set - from cm.issue import create_issue from cm.services.concern.checks import cluster_mapping_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects with atomic(): 
add_hosts_to_cluster(cluster_id=cluster_id, hosts=hosts, db=ClusterDB) cluster = Cluster.objects.get(id=cluster_id) + cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) if not cluster_mapping_has_issue(cluster=cluster): - delete_issue( - owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT - ) + delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) elif not cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT): - concern = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) - distribute_concern_on_related_objects( - owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), concern_id=concern.id - ) + concern = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) + distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern.id) re_apply_object_policy(apply_object=cluster) diff --git a/python/cm/services/concern/__init__.py b/python/cm/services/concern/__init__.py index 31267ac0ad..8034d70abc 100644 --- a/python/cm/services/concern/__init__.py +++ b/python/cm/services/concern/__init__.py @@ -10,6 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ._operaitons import delete_issue +from ._operaitons import create_issue, delete_issue, retrieve_issue -__all__ = ["delete_issue"] +__all__ = ["delete_issue", "retrieve_issue", "create_issue"] diff --git a/python/cm/services/concern/_operaitons.py b/python/cm/services/concern/_operaitons.py index 1a59a10592..067b7fbd75 100644 --- a/python/cm/services/concern/_operaitons.py +++ b/python/cm/services/concern/_operaitons.py @@ -14,9 +14,97 @@ from django.contrib.contenttypes.models import ContentType from cm.converters import core_type_to_model -from cm.models import ConcernCause, ConcernItem, ConcernType +from cm.models import ClusterObject, ConcernCause, ConcernItem, ConcernType, ObjectType, Prototype +from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, PlaceholderTypeDTO, build_concern_reason + +_issue_template_map = { + ConcernCause.CONFIG: ConcernMessage.CONFIG_ISSUE, + ConcernCause.IMPORT: ConcernMessage.REQUIRED_IMPORT_ISSUE, + ConcernCause.SERVICE: ConcernMessage.REQUIRED_SERVICE_ISSUE, + ConcernCause.HOSTCOMPONENT: ConcernMessage.HOST_COMPONENT_ISSUE, + ConcernCause.REQUIREMENT: ConcernMessage.UNSATISFIED_REQUIREMENT_ISSUE, +} def delete_issue(owner: CoreObjectDescriptor, cause: ConcernCause) -> None: owner_type = ContentType.objects.get_for_model(core_type_to_model(core_type=owner.type)) ConcernItem.objects.filter(owner_id=owner.id, owner_type=owner_type, cause=cause, type=ConcernType.ISSUE).delete() + + +def retrieve_issue(owner: CoreObjectDescriptor, cause: ConcernCause) -> ConcernItem | None: + owner_type = ContentType.objects.get_for_model(core_type_to_model(core_type=owner.type)) + return ConcernItem.objects.filter( + owner_id=owner.id, owner_type=owner_type, cause=cause, type=ConcernType.ISSUE + ).first() + + +def create_issue(owner: CoreObjectDescriptor, cause: ConcernCause) -> ConcernItem: + concern_message = _issue_template_map[cause] + target, placeholder_types_dto = 
_get_target_and_placeholder_types(concern_message=concern_message, owner=owner) + reason = build_concern_reason( + template=concern_message.template, + placeholder_objects=PlaceholderObjectsDTO( + source=core_type_to_model(owner.type).objects.get(pk=owner.id), target=target + ), + placeholder_types=placeholder_types_dto, + ) + name = f"{cause or ''}_{ConcernType.ISSUE}".strip("_") + owner_type = ContentType.objects.get_for_model(core_type_to_model(core_type=owner.type)) + + return ConcernItem.objects.create( + type=ConcernType.ISSUE, name=name, reason=reason, owner_id=owner.id, owner_type=owner_type, cause=cause + ) + + +def _get_target_and_placeholder_types( + concern_message: ConcernMessage, owner: CoreObjectDescriptor +) -> tuple[Prototype | None, PlaceholderTypeDTO]: + owner_prototype = Prototype.objects.values("type", "bundle_id", "requires").get( + pk=core_type_to_model(owner.type).objects.values_list("prototype_id", flat=True).get(pk=owner.id) + ) + target = None + + match concern_message: + case ConcernMessage.CONFIG_ISSUE: + placeholder_type_dto = PlaceholderTypeDTO(source=f"{owner_prototype['type']}_config") + + case ConcernMessage.REQUIRED_IMPORT_ISSUE: + placeholder_type_dto = PlaceholderTypeDTO(source="cluster_import") + + case ConcernMessage.REQUIRED_SERVICE_ISSUE: + # owner type = cluster + + placeholder_type_dto = PlaceholderTypeDTO(source="cluster_services", target="prototype") + target = ( + Prototype.objects.filter( + bundle_id=owner_prototype["bundle_id"], + type=ObjectType.SERVICE, + required=True, + ) + .exclude( + id__in=ClusterObject.objects.values_list("prototype_id", flat=True).filter(cluster_id=owner.id) + ) + .first() + ) + + case ConcernMessage.HOST_COMPONENT_ISSUE: + placeholder_type_dto = PlaceholderTypeDTO(source="cluster_mapping") + + case ConcernMessage.UNSATISFIED_REQUIREMENT_ISSUE: + # owner type = service + + cluster_id = ClusterObject.objects.values_list("cluster_id", flat=True).get(pk=owner.id) + placeholder_type_dto = 
PlaceholderTypeDTO(source="cluster_services", target="prototype") + + required_services_names = {require["service"] for require in owner_prototype["requires"]} + existing_required_services = set( + ClusterObject.objects.values_list("prototype__name", flat=True).filter( + cluster_id=cluster_id, prototype__name__in=required_services_names + ) + ) + if absent_services_names := required_services_names.difference(existing_required_services): + target = Prototype.objects.filter( + name__in=absent_services_names, type=ObjectType.SERVICE, bundle_id=owner_prototype["bundle_id"] + ).first() + + return target, placeholder_type_dto diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index 5bf8e10d71..ebba24f822 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -17,9 +17,8 @@ from core.types import ADCMCoreType, CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType -from cm.issue import create_issue from cm.models import Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, ServiceComponent -from cm.services.concern import delete_issue +from cm.services.concern import create_issue, delete_issue, retrieve_issue from cm.services.concern.checks import ( cluster_mapping_has_issue, object_configuration_has_issue, @@ -40,9 +39,10 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce (ConcernCause.SERVICE, object_has_required_services_issue), ) + cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) for cause, has_issue in cluster_checks: if has_issue(cluster): - issue = create_issue(obj=cluster, issue_cause=cause) + issue = create_issue(owner=cluster_cod, cause=cause) new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) return new_concerns @@ -52,10 +52,12 @@ def recalculate_own_concerns_on_add_services( cluster: Cluster, services: Iterable[ClusterObject] ) -> OwnObjectConcernMap: new_concerns: 
OwnObjectConcernMap = defaultdict(lambda: defaultdict(set)) + cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) # create new concerns - if cluster_mapping_has_issue(cluster=cluster) and cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT) is None: - issue = create_issue(obj=cluster, issue_cause=ConcernCause.HOSTCOMPONENT) + cluster_own_hc_issue = retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) + if cluster_own_hc_issue is None and cluster_mapping_has_issue(cluster=cluster): + issue = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) service_checks = ( @@ -64,14 +66,17 @@ def recalculate_own_concerns_on_add_services( (ConcernCause.REQUIREMENT, service_requirements_has_issue), ) for service in services: + service_cod = CoreObjectDescriptor(id=service.id, type=ADCMCoreType.SERVICE) for concern_cause, has_issue in service_checks: if has_issue(service): - issue = create_issue(obj=service, issue_cause=concern_cause) + issue = create_issue(owner=service_cod, cause=concern_cause) new_concerns[ADCMCoreType.SERVICE][service.pk].add(issue.pk) for component in ServiceComponent.objects.filter(service__in=services): if object_configuration_has_issue(component): - issue = create_issue(obj=component, issue_cause=ConcernCause.CONFIG) + issue = create_issue( + owner=CoreObjectDescriptor(id=component.id, type=ADCMCoreType.COMPONENT), cause=ConcernCause.CONFIG + ) new_concerns[ADCMCoreType.COMPONENT][component.pk].add(issue.pk) # remove gone concerns @@ -89,7 +94,7 @@ def recalculate_own_concerns_on_add_services( def recalculate_own_concerns_on_add_hosts(host: Host) -> OwnObjectConcernMap: if object_configuration_has_issue(host): - issue = create_issue(obj=host, issue_cause=ConcernCause.CONFIG) + issue = create_issue(owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), cause=ConcernCause.CONFIG) return {ADCMCoreType.HOST: {host.id: {issue.id}}} 
return {} @@ -112,12 +117,13 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: ) ) + cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) for cause, has_issue in cluster_checks: if cause in existing_cluster_concern_causes: continue if has_issue(cluster): - create_issue(obj=cluster, issue_cause=cause) + create_issue(owner=cluster_cod, cause=cause) service_checks = ( (ConcernCause.CONFIG, object_configuration_has_issue), @@ -135,12 +141,13 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: ) ) for service in services: + service_cod = CoreObjectDescriptor(id=service.id, type=ADCMCoreType.SERVICE) for concern_cause, has_issue in service_checks: if (service.id, concern_cause) in existing_service_concern_causes: continue if has_issue(service): - create_issue(obj=service, issue_cause=concern_cause) + create_issue(owner=service_cod, cause=concern_cause) components_with_config_concerns = set( ConcernItem.objects.values_list("owner_id", flat=True).filter( @@ -156,4 +163,6 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) -> None: .exclude(id__in=components_with_config_concerns) ): if object_configuration_has_issue(component): - create_issue(obj=component, issue_cause=ConcernCause.CONFIG) + create_issue( + owner=CoreObjectDescriptor(id=component.id, type=ADCMCoreType.COMPONENT), cause=ConcernCause.CONFIG + ) diff --git a/python/cm/tests/test_adcm_entity.py b/python/cm/tests/test_adcm_entity.py index 250b2c8dc6..fe5692b494 100644 --- a/python/cm/tests/test_adcm_entity.py +++ b/python/cm/tests/test_adcm_entity.py @@ -11,9 +11,11 @@ # limitations under the License. 
from adcm.tests.base import BaseTestCase +from core.types import ADCMCoreType, CoreObjectDescriptor -from cm.issue import add_concern_to_object, create_issue, remove_concern_from_object +from cm.issue import add_concern_to_object, remove_concern_from_object from cm.models import ConcernCause, ConcernItem, ConcernType +from cm.services.concern import create_issue from cm.tests.utils import gen_concern_item, generate_hierarchy @@ -99,7 +101,7 @@ def test_get_own_issue__others(self): cluster = self.hierarchy["cluster"] service = self.hierarchy["service"] issue_cause = ConcernCause.CONFIG - issue = create_issue(obj=cluster, issue_cause=issue_cause) + issue = create_issue(owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=issue_cause) add_concern_to_object(object_=cluster, concern=issue) add_concern_to_object(object_=service, concern=issue) @@ -108,7 +110,7 @@ def test_get_own_issue__others(self): def test_get_own_issue__exists(self): cluster = self.hierarchy["cluster"] issue_cause = ConcernCause.CONFIG - issue = create_issue(obj=cluster, issue_cause=issue_cause) + issue = create_issue(owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=issue_cause) add_concern_to_object(object_=cluster, concern=issue) self.assertIsNotNone(cluster.get_own_issue(issue_cause)) diff --git a/python/cm/tests/test_flag.py b/python/cm/tests/test_flag.py index 554dff8cee..caba63720b 100644 --- a/python/cm/tests/test_flag.py +++ b/python/cm/tests/test_flag.py @@ -17,7 +17,7 @@ from core.types import ADCMCoreType, CoreObjectDescriptor from cm.converters import orm_object_to_core_type -from cm.issue import create_issue, create_lock +from cm.issue import create_lock from cm.models import ( ADCM, Cluster, @@ -31,6 +31,7 @@ ServiceComponent, TaskLog, ) +from cm.services.concern import create_issue from cm.services.concern.flags import BuiltInFlag, ConcernFlag, lower_all_flags, lower_flag, raise_flag @@ -166,7 +167,10 @@ def 
test_lower_flag_does_not_interfere_with_other_concerns_success(self) -> None dummy_job = JobLog(name="cool", task=TaskLog(id=10)) for object_ in (*clusters, *components, *hosts): - create_issue(obj=object_, issue_cause=ConcernCause.CONFIG) + create_issue( + owner=CoreObjectDescriptor(id=object_.id, type=orm_object_to_core_type(object_)), + cause=ConcernCause.CONFIG, + ) create_lock(owner=object_, job=dummy_job) self.assertEqual(ConcernItem.objects.count(), 12) self.assertEqual(ConcernItem.objects.filter(type=ConcernType.FLAG).count(), 0) diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index dc1bf80b2a..cb3b005fb5 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -35,8 +35,9 @@ is_version_suitable, save_hc, ) +from cm.converters import orm_object_to_core_type from cm.errors import AdcmEx -from cm.issue import check_config, create_issue +from cm.issue import check_config from cm.logger import logger from cm.models import ( ADCMEntity, @@ -60,6 +61,7 @@ Upgrade, ) from cm.services.cluster import retrieve_clusters_topology +from cm.services.concern import create_issue, retrieve_issue from cm.services.concern.cases import ( recalculate_concerns_on_cluster_upgrade, ) @@ -567,11 +569,11 @@ def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) _switch_object(host, prototype) def _update_concerns(self) -> None: - if not self._target.get_own_issue(ConcernCause.CONFIG) and not check_config(self._target): - concern = create_issue(obj=self._target, issue_cause=ConcernCause.CONFIG) - distribute_concern_on_related_objects( - owner=CoreObjectDescriptor(id=self._target.id, type=ADCMCoreType.HOSTPROVIDER), concern_id=concern.id - ) + target_cod = CoreObjectDescriptor(id=self._target.id, type=orm_object_to_core_type(self._target)) + target_own_config_issue = retrieve_issue(owner=target_cod, cause=ConcernCause.CONFIG) + if target_own_config_issue is None and not check_config(self._target): + concern = create_issue(owner=target_cod, 
cause=ConcernCause.CONFIG) + distribute_concern_on_related_objects(owner=target_cod, concern_id=concern.id) clusters_for_redistribution: set[ClusterID] = set() m2m_model = Host.concerns.through @@ -589,7 +591,9 @@ def _update_concerns(self) -> None: ) ): if not check_config(host): - concern = create_issue(obj=host, issue_cause=ConcernCause.CONFIG) + concern = create_issue( + owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), cause=ConcernCause.CONFIG + ) clusters_for_redistribution.add(host.cluster_id) host_own_concerns_to_link.append(m2m_model(host_id=host.id, concernitem_id=concern.id)) From 31c242bdaf9ec16b0242f06040ccfe5e94bde001 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Thu, 8 Aug 2024 10:45:59 +0000 Subject: [PATCH 41/98] =?UTF-8?q?ADCM-5838:=20Remove=20usages=20of=20old?= =?UTF-8?q?=20=E2=80=9Ccheck=5F*=E2=80=9D=20functions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/adcm/tests/base.py | 8 ++ python/api/tests/test_adcm.py | 12 ++- python/api/tests/test_api.py | 12 +-- python/api_v2/tests/test_cluster.py | 10 ++- python/cm/api.py | 17 ++-- python/cm/issue.py | 114 +++------------------------ python/cm/services/concern/checks.py | 3 +- python/cm/tests/test_host.py | 5 +- python/cm/tests/test_issue.py | 27 ++++--- python/cm/upgrade.py | 6 +- 10 files changed, 75 insertions(+), 139 deletions(-) diff --git a/python/adcm/tests/base.py b/python/adcm/tests/base.py index 04cbaeaaa4..75232f37ba 100644 --- a/python/adcm/tests/base.py +++ b/python/adcm/tests/base.py @@ -39,6 +39,7 @@ Host, HostComponent, HostProvider, + ObjectConfig, ObjectType, Prototype, ServiceComponent, @@ -416,6 +417,13 @@ def create_host_in_cluster(self, provider_pk: int, name: str, cluster_pk: int) - return host + def create_new_config(self, config_data: dict) -> ObjectConfig: + config = ObjectConfig.objects.create(current=1, previous=0) + config_log = ConfigLog.objects.create(obj_ref=config, config=config_data) 
+ config.current = config_log.pk + config.save(update_fields=["current"]) + return config + @staticmethod def get_hostcomponent_data(service_pk: int, host_pk: int) -> list[dict[str, int]]: hostcomponent_data = [] diff --git a/python/api/tests/test_adcm.py b/python/api/tests/test_adcm.py index 1d2561a8cd..2933810b52 100644 --- a/python/api/tests/test_adcm.py +++ b/python/api/tests/test_adcm.py @@ -11,7 +11,11 @@ # limitations under the License. from adcm.tests.base import BaseTestCase -from cm.models import ADCM, ConcernItem +from cm.converters import orm_object_to_core_type +from cm.issue import add_concern_to_object +from cm.models import ADCM, ConcernCause +from cm.services.concern import create_issue +from core.types import CoreObjectDescriptor from django.urls import reverse from rest_framework.response import Response from rest_framework.status import HTTP_200_OK @@ -22,7 +26,11 @@ def setUp(self) -> None: super().setUp() self.adcm = ADCM.objects.select_related("prototype").last() - self.concern = ConcernItem.objects.last() + self.concern = create_issue( + owner=CoreObjectDescriptor(id=self.adcm.id, type=orm_object_to_core_type(self.adcm)), + cause=ConcernCause.CONFIG, + ) + add_concern_to_object(object_=self.adcm, concern=self.concern) def test_list(self): test_data = { diff --git a/python/api/tests/test_api.py b/python/api/tests/test_api.py index fd6d413dfb..53aa666192 100755 --- a/python/api/tests/test_api.py +++ b/python/api/tests/test_api.py @@ -917,9 +917,9 @@ def test_save_hc__big_update__locked_hierarchy( host_2 is unlocked host_3 became locked """ - service = gen_service(self.cluster) - component_1 = gen_component(service) - component_2 = gen_component(service) + service = gen_service(self.cluster, bundle=self.bundle) + component_1 = gen_component(service, bundle=self.bundle) + component_2 = gen_component(service, bundle=self.bundle) provider = gen_provider() host_1 = gen_host(provider, cluster=self.cluster) host_2 = gen_host(provider, 
cluster=self.cluster) @@ -969,9 +969,9 @@ def test_save_hc__big_update__unlocked_hierarchy(self, mock_update, mock_load): host_2 remains unlocked host_3 remains unlocked """ - service = gen_service(self.cluster) - component_1 = gen_component(service) - component_2 = gen_component(service) + service = gen_service(self.cluster, bundle=self.bundle) + component_1 = gen_component(service, bundle=self.bundle) + component_2 = gen_component(service, bundle=self.bundle) provider = gen_provider() host_1 = gen_host(provider, cluster=self.cluster) host_2 = gen_host(provider, cluster=self.cluster) diff --git a/python/api_v2/tests/test_cluster.py b/python/api_v2/tests/test_cluster.py index a58a30dd02..ab9cee399c 100644 --- a/python/api_v2/tests/test_cluster.py +++ b/python/api_v2/tests/test_cluster.py @@ -17,6 +17,7 @@ Action, ADCMEntityStatus, AnsibleConfig, + Bundle, Cluster, ClusterObject, Prototype, @@ -770,12 +771,13 @@ def setUp(self) -> None: hierarchy_1 = generate_hierarchy() self.cluster_1 = hierarchy_1["cluster"] + cluster_bundle = Bundle.objects.get(pk=self.cluster_1.bundle_id) self.service_11 = hierarchy_1["service"] self.component_111 = hierarchy_1["component"] - self.component_112 = gen_component(service=self.service_11) - self.service_12 = gen_service(cluster=self.cluster_1) - self.component_121 = gen_component(service=self.service_12) - self.component_122 = gen_component(service=self.service_12) + self.component_112 = gen_component(service=self.service_11, bundle=cluster_bundle) + self.service_12 = gen_service(cluster=self.cluster_1, bundle=cluster_bundle) + self.component_121 = gen_component(service=self.service_12, bundle=cluster_bundle) + self.component_122 = gen_component(service=self.service_12, bundle=cluster_bundle) self.host_1 = hierarchy_1["host"] self.host_2 = gen_host(provider=hierarchy_1["provider"], cluster=self.cluster_1) self.set_hostcomponent( diff --git a/python/cm/api.py b/python/cm/api.py index e1305fa051..d5fceed13d 100644 --- 
a/python/cm/api.py +++ b/python/cm/api.py @@ -45,7 +45,6 @@ update_hierarchy_issues, update_issue_after_deleting, ) -from cm.issue import check_hc as check_hostcomponent_issue from cm.logger import logger from cm.models import ( ADCM, @@ -78,7 +77,11 @@ recalculate_own_concerns_on_add_hosts, recalculate_own_concerns_on_add_services, ) -from cm.services.concern.checks import object_configuration_has_issue, object_imports_has_issue +from cm.services.concern.checks import ( + cluster_mapping_has_issue, + object_configuration_has_issue, + object_imports_has_issue, +) from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.status.notify import reset_hc_map, reset_objects_in_mm @@ -263,8 +266,10 @@ def delete_service(service: ClusterObject) -> None: cluster = service.cluster cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) - if check_hostcomponent_issue(cluster=cluster): - delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) + if not cluster_mapping_has_issue(cluster=cluster): + delete_issue( + owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT + ) elif retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) is None: concern = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern.id) @@ -329,7 +334,7 @@ def remove_host_from_cluster(host: Host) -> Host: remove_concern_from_object(object_=host, concern=CTX.lock) - if check_hostcomponent_issue(cluster): + if not cluster_mapping_has_issue(cluster): delete_issue( owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT ) @@ -626,7 +631,7 @@ def save_hc( # HC may break # We can't be sure this method is called after some sort of "check" cluster_cod = 
CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) - if check_hostcomponent_issue(cluster=cluster): + if not cluster_mapping_has_issue(cluster=cluster): delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) elif retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) is None: create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) diff --git a/python/cm/issue.py b/python/cm/issue.py index 18d2398df8..2b00f6fc24 100755 --- a/python/cm/issue.py +++ b/python/cm/issue.py @@ -30,22 +30,26 @@ ADCMEntity, Bundle, Cluster, - ClusterBind, ClusterObject, ConcernCause, ConcernItem, ConcernType, ConfigLog, Host, - HostComponent, JobLog, ObjectType, Prototype, - PrototypeImport, ServiceComponent, TaskLog, ) from cm.services.concern import create_issue, retrieve_issue +from cm.services.concern.checks import ( + cluster_mapping_has_issue, + object_configuration_has_issue, + object_has_required_services_issue, + object_imports_has_issue, + service_requirements_has_issue, +) from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, build_concern_reason from cm.status_api import send_concern_creation_event, send_concern_delete_event from cm.utils import obj_ref @@ -79,30 +83,6 @@ def check_config(obj: ADCMEntity) -> bool: return True -def check_required_services(cluster: Cluster) -> bool: - bundle = cluster.prototype.bundle - for proto in Prototype.objects.filter(bundle=bundle, type="service", required=True): - try: - ClusterObject.objects.get(cluster=cluster, prototype=proto) - except ClusterObject.DoesNotExist: - logger.debug("required service %s of %s is missing", proto_ref(prototype=proto), obj_ref(obj=cluster)) - return False - return True - - -def check_required_import(obj: [Cluster, ClusterObject]) -> bool: - if obj.prototype.type == ObjectType.CLUSTER: - cluster = obj - service = None - elif obj.prototype.type == ObjectType.SERVICE: - service = obj - cluster = obj.cluster - else: - raise 
AdcmEx(code="ISSUE_INTEGRITY_ERROR", msg=f"Could not check import for {obj}") - - return do_check_import(cluster=cluster, service=service) - - def check_service_requires(cluster: Cluster, proto: Prototype) -> None: if not proto.requires: return @@ -126,74 +106,6 @@ def check_service_requires(cluster: Cluster, proto: Prototype) -> None: ) -def check_requires(service: ClusterObject) -> bool: - try: - check_service_requires(cluster=service.cluster, proto=service.prototype) - except AdcmEx: - logger.debug("requirements not satisfied for %s", proto_ref(prototype=service.prototype)) - - return False - - return True - - -def do_check_import(cluster: Cluster, service: ClusterObject | None = None) -> bool: - proto = cluster.prototype - - if service: - proto = service.prototype - - prototype_imports = PrototypeImport.objects.filter(prototype=proto) - if not prototype_imports.exists(): - return True - - if not any(prototype_imports.values_list("required", flat=True)): - return True - - required_import_names = set(prototype_imports.values_list("name", flat=True).filter(required=True)) - - for cluster_name, service_name in ClusterBind.objects.values_list( - "source_cluster__prototype__name", "source_service__prototype__name" - ).filter(cluster=cluster, service=service): - if service_name: - required_import_names -= {service_name} - elif cluster_name: - required_import_names -= {cluster_name} - - return required_import_names == set() - - -def check_hc(cluster: Cluster) -> bool: - shc_list = [] - for hostcomponent in HostComponent.objects.filter(cluster=cluster): - shc_list.append((hostcomponent.service, hostcomponent.host, hostcomponent.component)) - - if not shc_list: - for service in ClusterObject.objects.filter(cluster=cluster): - for comp in Prototype.objects.filter(parent=service.prototype, type="component"): - const = comp.constraint - if len(const) == 2 and const[0] == 0: - continue - logger.debug("void host components for %s", proto_ref(prototype=service.prototype)) - 
return False - - for service in ClusterObject.objects.filter(cluster=cluster): - try: - check_component_constraint( - cluster=cluster, service_prototype=service.prototype, hc_in=[i for i in shc_list if i[0] == service] - ) - except AdcmEx: - return False - - try: - check_hc_requires(shc_list=shc_list) - check_bound_components(shc_list=shc_list) - except AdcmEx: - return False - - return True - - def check_hc_requires(shc_list: list[tuple[ClusterObject, Host, ServiceComponent]]) -> None: for serv_host_comp in [i for i in shc_list if i[2].prototype.requires or i[0].prototype.requires]: for require in [*serv_host_comp[2].prototype.requires, *serv_host_comp[0].prototype.requires]: @@ -362,11 +274,11 @@ def check_component_constraint( _issue_check_map = { - ConcernCause.CONFIG: check_config, - ConcernCause.IMPORT: check_required_import, - ConcernCause.SERVICE: check_required_services, - ConcernCause.HOSTCOMPONENT: check_hc, - ConcernCause.REQUIREMENT: check_requires, + ConcernCause.CONFIG: object_configuration_has_issue, + ConcernCause.IMPORT: object_imports_has_issue, + ConcernCause.SERVICE: object_has_required_services_issue, + ConcernCause.HOSTCOMPONENT: cluster_mapping_has_issue, + ConcernCause.REQUIREMENT: service_requirements_has_issue, } _prototype_issue_map = { ObjectType.ADCM: (ConcernCause.CONFIG,), @@ -408,7 +320,7 @@ def recheck_issues(obj: ADCMEntity) -> None: """Re-check for object's type-specific issues""" issue_causes = _prototype_issue_map.get(obj.prototype.type, []) for issue_cause in issue_causes: - if not _issue_check_map[issue_cause](obj): + if _issue_check_map[issue_cause](obj): add_issue_on_linked_objects(obj=obj, issue_cause=issue_cause) else: remove_issue(obj=obj, issue_cause=issue_cause) diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 652c907083..9a8ae939ba 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -230,10 +230,9 @@ def 
cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: - bound_to """ - # extract requirements - bundle_id = cluster.prototype.bundle_id + # extract requirements requirements_from_components: dict[PrototypeID, ComponentMappingRequirements] = {} for prototype_id, constraint, requires, bound_to in Prototype.objects.values_list( diff --git a/python/cm/tests/test_host.py b/python/cm/tests/test_host.py index 25269de9df..4155c3669a 100644 --- a/python/cm/tests/test_host.py +++ b/python/cm/tests/test_host.py @@ -49,15 +49,16 @@ def setUp(self) -> None: } self.upload_and_load_bundle(path=Path(self.base_dir, "python", "cm", "tests", "files", "ssh.1.0.tar")) + config = self.create_new_config(config_data={"entry": "test"}) self.provider = HostProvider.objects.create( - name="test_provider", - prototype=Prototype.objects.filter(type="provider").first(), + name="test_provider", prototype=Prototype.objects.filter(type="provider").first(), config=config ) self.host = Host.objects.create( fqdn="test-fqdn", prototype=Prototype.objects.filter(type="host").first(), provider=self.provider, maintenance_mode=MaintenanceMode.ON, + config=config, ) def get_host_proto_id(self): diff --git a/python/cm/tests/test_issue.py b/python/cm/tests/test_issue.py index 755e8e1d78..5f316df52d 100644 --- a/python/cm/tests/test_issue.py +++ b/python/cm/tests/test_issue.py @@ -22,7 +22,6 @@ add_concern_to_object, add_issue_on_linked_objects, create_lock, - do_check_import, recheck_issues, remove_issue, update_hierarchy_issues, @@ -38,16 +37,16 @@ PrototypeImport, ) from cm.services.cluster import perform_host_to_cluster_map -from cm.services.concern.checks import object_has_required_services_issue +from cm.services.concern.checks import object_has_required_services_issue, object_imports_has_issue from cm.services.status import notify from cm.tests.utils import gen_cluster, gen_job_log, gen_service, gen_task_log, generate_hierarchy mock_issue_check_map = { - ConcernCause.CONFIG: lambda x: False, - 
ConcernCause.IMPORT: lambda x: True, + ConcernCause.CONFIG: lambda x: True, + ConcernCause.IMPORT: lambda x: False, ConcernCause.SERVICE: lambda x: False, - ConcernCause.HOSTCOMPONENT: lambda x: True, - ConcernCause.REQUIREMENT: lambda x: True, + ConcernCause.HOSTCOMPONENT: lambda x: False, + ConcernCause.REQUIREMENT: lambda x: False, } @@ -231,19 +230,19 @@ def cook_cluster(proto_name, cluster_name): def test_no_import(self): _, _, cluster = self.cook_cluster("Hadoop", "Cluster1") - self.assertTrue(do_check_import(cluster)) + self.assertFalse(object_imports_has_issue(cluster)) def test_import_required(self): _, proto1, cluster = self.cook_cluster("Hadoop", "Cluster1") PrototypeImport.objects.create(prototype=proto1, name="Monitoring", required=True) - self.assertFalse(do_check_import(cluster)) + self.assertTrue(object_imports_has_issue(cluster)) def test_import_not_required(self): _, proto1, cluster = self.cook_cluster("Hadoop", "Cluster1") PrototypeImport.objects.create(prototype=proto1, name="Monitoring", required=False) - self.assertTrue(do_check_import(cluster)) + self.assertFalse(object_imports_has_issue(cluster)) def test_cluster_imported(self): _, proto1, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -252,7 +251,7 @@ def test_cluster_imported(self): _, _, cluster2 = self.cook_cluster("Monitoring", "Cluster2") ClusterBind.objects.create(cluster=cluster1, source_cluster=cluster2) - self.assertTrue(do_check_import(cluster1)) + self.assertFalse(object_imports_has_issue(cluster1)) def test_service_imported(self): _, proto1, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -263,7 +262,7 @@ def test_service_imported(self): service = add_service_to_cluster(cluster2, proto3) ClusterBind.objects.create(cluster=cluster1, source_cluster=cluster2, source_service=service) - self.assertTrue(do_check_import(cluster1)) + self.assertFalse(object_imports_has_issue(cluster1)) def test_import_to_service(self): bundle_1, _, cluster1 = self.cook_cluster("Hadoop", 
"Cluster1") @@ -274,7 +273,8 @@ def test_import_to_service(self): _, _, cluster2 = self.cook_cluster("Monitoring", "Cluster2") ClusterBind.objects.create(cluster=cluster1, service=service, source_cluster=cluster2) - self.assertTrue(do_check_import(cluster1, service)) + self.assertFalse(object_imports_has_issue(cluster1)) + self.assertFalse(object_imports_has_issue(service)) def test_import_service_to_service(self): bundle_1, _, cluster1 = self.cook_cluster("Hadoop", "Cluster1") @@ -292,7 +292,8 @@ def test_import_service_to_service(self): source_service=service2, ) - self.assertTrue(do_check_import(cluster1, service1)) + self.assertFalse(object_imports_has_issue(cluster1)) + self.assertFalse(object_imports_has_issue(service1)) def test_issue_cluster_required_import(self): _, proto1, cluster1 = self.cook_cluster("Hadoop", "Cluster1") diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index cb3b005fb5..615baaeaf7 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -37,7 +37,6 @@ ) from cm.converters import orm_object_to_core_type from cm.errors import AdcmEx -from cm.issue import check_config from cm.logger import logger from cm.models import ( ADCMEntity, @@ -65,6 +64,7 @@ from cm.services.concern.cases import ( recalculate_concerns_on_cluster_upgrade, ) +from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.job.action import ActionRunPayload, run_action from cm.status_api import send_prototype_and_state_update_event @@ -571,7 +571,7 @@ def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) def _update_concerns(self) -> None: target_cod = CoreObjectDescriptor(id=self._target.id, type=orm_object_to_core_type(self._target)) target_own_config_issue = retrieve_issue(owner=target_cod, cause=ConcernCause.CONFIG) - if target_own_config_issue is None and not check_config(self._target): + 
if target_own_config_issue is None and object_configuration_has_issue(self._target): concern = create_issue(owner=target_cod, cause=ConcernCause.CONFIG) distribute_concern_on_related_objects(owner=target_cod, concern_id=concern.id) @@ -590,7 +590,7 @@ def _update_concerns(self) -> None: ) ) ): - if not check_config(host): + if object_configuration_has_issue(host): concern = create_issue( owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), cause=ConcernCause.CONFIG ) From 746bc2385de596bd9b55074671a479bf301df5a4 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 9 Aug 2024 07:36:16 +0000 Subject: [PATCH 42/98] ADCM-5846 Remove all concerns on cluster/service deletion --- python/cm/api.py | 34 +++++++++++++++-------- python/cm/services/concern/_operaitons.py | 14 +++++++++- 2 files changed, 36 insertions(+), 12 deletions(-) diff --git a/python/cm/api.py b/python/cm/api.py index d5fceed13d..3bcdbf9278 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -20,7 +20,6 @@ from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.exceptions import MultipleObjectsReturned -from django.db.models import Q from django.db.transaction import atomic, on_commit from rbac.models import Policy, re_apply_object_policy from rbac.roles import apply_policy_for_new_config @@ -43,7 +42,6 @@ check_service_requires, remove_concern_from_object, update_hierarchy_issues, - update_issue_after_deleting, ) from cm.logger import logger from cm.models import ( @@ -72,6 +70,7 @@ ) from cm.services.cluster import retrieve_clusters_topology from cm.services.concern import create_issue, delete_issue, retrieve_issue +from cm.services.concern._operaitons import delete_concerns_of_removed_objects from cm.services.concern.cases import ( recalculate_own_concerns_on_add_clusters, recalculate_own_concerns_on_add_hosts, @@ -253,14 +252,14 @@ def delete_host(host: Host, cancel_tasks: bool = True) -> None: def delete_service(service: 
ClusterObject) -> None: service_pk = service.pk - # need to remove concerns of components manually, because they aren't cleared otherwise - ConcernItem.objects.filter( - Q( - owner_id__in=ServiceComponent.objects.values_list("id", flat=True).filter(service=service), - owner_type=ServiceComponent.class_content_type, - ) - | Q(owner_id=service_pk, owner_type=service.content_type) - ).delete() + delete_concerns_of_removed_objects( + objects={ + ADCMCoreType.SERVICE: (service_pk,), + ADCMCoreType.COMPONENT: tuple( + ServiceComponent.objects.values_list("id", flat=True).filter(service_id=service_pk) + ), + } + ) service.delete() @@ -306,8 +305,21 @@ def delete_cluster(cluster: Cluster) -> None: MaintenanceMode.OFF, ", ".join(host_pks), ) + + delete_concerns_of_removed_objects( + objects={ + ADCMCoreType.CLUSTER: (cluster.id,), + ADCMCoreType.SERVICE: tuple( + ClusterObject.objects.values_list("id", flat=True).filter(cluster_id=cluster.id) + ), + ADCMCoreType.COMPONENT: tuple( + ServiceComponent.objects.values_list("id", flat=True).filter(cluster_id=cluster.id) + ), + } + ) + cluster.delete() - update_issue_after_deleting() + reset_hc_map() reset_objects_in_mm() diff --git a/python/cm/services/concern/_operaitons.py b/python/cm/services/concern/_operaitons.py index 067b7fbd75..a760e310e9 100644 --- a/python/cm/services/concern/_operaitons.py +++ b/python/cm/services/concern/_operaitons.py @@ -10,8 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from core.types import CoreObjectDescriptor +from typing import Iterable + +from core.types import ADCMCoreType, CoreObjectDescriptor, ObjectID from django.contrib.contenttypes.models import ContentType +from django.db.models import Q from cm.converters import core_type_to_model from cm.models import ClusterObject, ConcernCause, ConcernItem, ConcernType, ObjectType, Prototype @@ -26,6 +29,15 @@ } +def delete_concerns_of_removed_objects(objects: dict[ADCMCoreType, Iterable[ObjectID]]) -> None: + query = Q() + + for type_, ids in objects.items(): + query |= Q(owner_type=core_type_to_model(type_).class_content_type, owner_id__in=ids) + + ConcernItem.objects.filter(query).delete() + + def delete_issue(owner: CoreObjectDescriptor, cause: ConcernCause) -> None: owner_type = ContentType.objects.get_for_model(core_type_to_model(core_type=owner.type)) ConcernItem.objects.filter(owner_id=owner.id, owner_type=owner_type, cause=cause, type=ConcernType.ISSUE).delete() From dddd067c10e7f0a301c4a181444425f7aefdf6dd Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Thu, 22 Aug 2024 07:23:39 +0000 Subject: [PATCH 43/98] =?UTF-8?q?ADCM-5839:=20Generalize=20=E2=80=9Ccheck?= =?UTF-8?q?=E2=80=9D=20related=20functions?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- python/api/tests/test_api.py | 75 ++++-- python/api_v2/cluster/utils.py | 31 +-- python/api_v2/tests/test_cluster.py | 25 +- python/api_v2/tests/test_mapping.py | 99 ++++---- python/cm/api.py | 45 +++- python/cm/issue.py | 210 ++-------------- python/cm/services/cluster.py | 8 +- python/cm/services/concern/checks.py | 350 +++++++++++++++++++++------ python/cm/services/job/checks.py | 34 ++- python/cm/services/types.py | 17 -- python/cm/tests/utils.py | 6 +- python/core/cluster/operations.py | 15 +- python/core/types.py | 4 +- 13 files changed, 519 insertions(+), 400 deletions(-) delete mode 100644 python/cm/services/types.py diff --git a/python/api/tests/test_api.py 
b/python/api/tests/test_api.py index 53aa666192..e803ef35d7 100755 --- a/python/api/tests/test_api.py +++ b/python/api/tests/test_api.py @@ -24,6 +24,7 @@ Host, HostComponent, ObjectConfig, + ObjectType, Prototype, ServiceComponent, ) @@ -33,6 +34,7 @@ gen_host, gen_host_component, gen_job_log, + gen_prototype, gen_provider, gen_service, gen_task_log, @@ -844,26 +846,41 @@ def setUp(self): date=timezone.now(), ) - self.prototype = Prototype.objects.create( + common_proto_data = { + "version": "2.5", + "license": "absent", + "license_path": None, + "license_hash": None, + "version_order": 11, + "required": False, + "shared": False, + "adcm_min_version": None, + "monitoring": "active", + "description": "", + } + self.cluster_prototype = Prototype.objects.create( bundle_id=self.bundle.id, - type="cluster", + type=ObjectType.CLUSTER, name="ADB", - display_name="ADB", - version="2.5", - license="absent", - license_path=None, - license_hash=None, - version_order=11, - required=False, - shared=False, - adcm_min_version=None, - monitoring="active", - description="", + **common_proto_data, + ) + self.service_prototype = Prototype.objects.create( + bundle_id=self.bundle.id, + type=ObjectType.SERVICE, + name="some_service", + **common_proto_data, + ) + self.component_prototype = Prototype.objects.create( + bundle_id=self.bundle.id, + type=ObjectType.COMPONENT, + name="some_component", + **common_proto_data, ) + self.object_config = ObjectConfig.objects.create(current=0, previous=0) self.cluster = Cluster.objects.create( - prototype_id=self.prototype.id, + prototype_id=self.cluster_prototype.id, name="Fear Limpopo", description="", config_id=self.object_config.id, @@ -872,10 +889,10 @@ def setUp(self): @patch("cm.api.reset_hc_map") def test_save_hc(self, mock_reset_hc_map): - cluster_object = ClusterObject.objects.create(prototype=self.prototype, cluster=self.cluster) - host = Host.objects.create(prototype=self.prototype, cluster=self.cluster) + cluster_object = 
ClusterObject.objects.create(prototype=self.service_prototype, cluster=self.cluster) + host = Host.objects.create(prototype=self.cluster_prototype, cluster=self.cluster) component = Prototype.objects.create( - parent=self.prototype, + parent=self.component_prototype, type="component", bundle_id=self.bundle.id, name="node", @@ -917,9 +934,15 @@ def test_save_hc__big_update__locked_hierarchy( host_2 is unlocked host_3 became locked """ - service = gen_service(self.cluster, bundle=self.bundle) - component_1 = gen_component(service, bundle=self.bundle) - component_2 = gen_component(service, bundle=self.bundle) + service = gen_service(self.cluster) + component_1_prototype = gen_prototype( + bundle=self.cluster.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_1" + ) + component_1 = gen_component(service=service, prototype=component_1_prototype) + component_2_prototype = gen_prototype( + bundle=self.cluster.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_2" + ) + component_2 = gen_component(service=service, prototype=component_2_prototype) provider = gen_provider() host_1 = gen_host(provider, cluster=self.cluster) host_2 = gen_host(provider, cluster=self.cluster) @@ -969,9 +992,15 @@ def test_save_hc__big_update__unlocked_hierarchy(self, mock_update, mock_load): host_2 remains unlocked host_3 remains unlocked """ - service = gen_service(self.cluster, bundle=self.bundle) - component_1 = gen_component(service, bundle=self.bundle) - component_2 = gen_component(service, bundle=self.bundle) + service = gen_service(self.cluster) + component_1_prototype = gen_prototype( + bundle=self.cluster.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_1" + ) + component_1 = gen_component(service=service, prototype=component_1_prototype) + component_2_prototype = gen_prototype( + bundle=self.cluster.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_2" + ) + component_2 = gen_component(service=service, 
prototype=component_2_prototype) provider = gen_provider() host_1 = gen_host(provider, cluster=self.cluster) host_2 = gen_host(provider, cluster=self.cluster) diff --git a/python/api_v2/cluster/utils.py b/python/api_v2/cluster/utils.py index b300fadcfc..760287ab8c 100644 --- a/python/api_v2/cluster/utils.py +++ b/python/api_v2/cluster/utils.py @@ -28,7 +28,6 @@ from cm.errors import AdcmEx from cm.issue import ( add_concern_to_object, - check_components_mapping_contraints, remove_concern_from_object, ) from cm.models import ( @@ -45,6 +44,7 @@ ) from cm.services.cluster import retrieve_clusters_topology from cm.services.concern import delete_issue +from cm.services.concern.checks import extract_data_for_requirements_check, is_constraint_requirements_unsatisfied from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import send_host_component_map_update_event @@ -235,25 +235,28 @@ def _check_mapping_data(mapping_data: MappingData) -> None: if not isinstance(component_prototype.bound_to, Empty): _check_single_mapping_bound_to(mapping_entry=mapping_entry, mapping_data=mapping_data) + requirements_data = extract_data_for_requirements_check( + cluster=mapping_data.orm_objects["cluster"], + input_mapping=[ + {"host_id": entry.host.id, "component_id": entry.component.id, "service_id": entry.service.id} + for entry in mapping_data.mapping + ], + ) + constraint_not_ok, error_message = is_constraint_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + components_map=requirements_data.objects_map_by_type["component"], + ) + if constraint_not_ok and error_message is not None: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) + for service in mapping_data.services.values(): service_prototype = 
mapping_data.prototypes[service.prototype_id] if service_prototype.requires: _check_single_service_requires( service_prototype=service_prototype, cluster_objects=mapping_data.objects_by_prototype_name ) - for component, component_prototype in mapping_data.service_components(service=service): - check_components_mapping_contraints( - hosts_count=len(mapping_data.hosts), - target_mapping_count=len( - [ - map_ - for map_ in mapping_data.mapping - if map_.service.id == service.id and map_.component.id == component.id - ] - ), - service_prototype=service_prototype, - component_prototype=component_prototype, - ) @atomic diff --git a/python/api_v2/tests/test_cluster.py b/python/api_v2/tests/test_cluster.py index ab9cee399c..3e316b2bec 100644 --- a/python/api_v2/tests/test_cluster.py +++ b/python/api_v2/tests/test_cluster.py @@ -17,15 +17,15 @@ Action, ADCMEntityStatus, AnsibleConfig, - Bundle, Cluster, ClusterObject, + ObjectType, Prototype, ServiceComponent, ) from cm.services.status.client import FullStatusMap from cm.tests.mocks.task_runner import RunTaskMock -from cm.tests.utils import gen_component, gen_host, gen_service, generate_hierarchy +from cm.tests.utils import gen_component, gen_host, gen_prototype, gen_service, generate_hierarchy from django.contrib.contenttypes.models import ContentType from guardian.models import GroupObjectPermission from rbac.models import User @@ -771,13 +771,24 @@ def setUp(self) -> None: hierarchy_1 = generate_hierarchy() self.cluster_1 = hierarchy_1["cluster"] - cluster_bundle = Bundle.objects.get(pk=self.cluster_1.bundle_id) self.service_11 = hierarchy_1["service"] self.component_111 = hierarchy_1["component"] - self.component_112 = gen_component(service=self.service_11, bundle=cluster_bundle) - self.service_12 = gen_service(cluster=self.cluster_1, bundle=cluster_bundle) - self.component_121 = gen_component(service=self.service_12, bundle=cluster_bundle) - self.component_122 = gen_component(service=self.service_12, 
bundle=cluster_bundle) + component_112_prototype = gen_prototype( + bundle=self.cluster_1.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_112" + ) + self.component_112 = gen_component(service=self.service_11, prototype=component_112_prototype) + service_12_prototype = gen_prototype( + bundle=self.cluster_1.prototype.bundle, proto_type=ObjectType.SERVICE, name="service_12" + ) + self.service_12 = gen_service(cluster=self.cluster_1, prototype=service_12_prototype) + component_121_prototype = gen_prototype( + bundle=self.cluster_1.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_121" + ) + self.component_121 = gen_component(service=self.service_12, prototype=component_121_prototype) + component_122_prototype = gen_prototype( + bundle=self.cluster_1.prototype.bundle, proto_type=ObjectType.COMPONENT, name="component_122" + ) + self.component_122 = gen_component(service=self.service_12, prototype=component_122_prototype) self.host_1 = hierarchy_1["host"] self.host_2 = gen_host(provider=hierarchy_1["provider"], cluster=self.cluster_1) self.set_hostcomponent( diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index d3626efae7..591c8dec61 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -639,8 +639,8 @@ def test_one_constraint_zero_in_hc_fail(self): "code": "COMPONENT_CONSTRAINT_ERROR", "level": "error", "desc": ( - f'Less then 1 required component "{component.display_name}" (0) in host component list ' - f'for service "{service.display_name}"' + f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}" ), }, ) @@ -670,8 +670,8 @@ def test_one_constraint_two_in_hc_fail(self): "code": "COMPONENT_CONSTRAINT_ERROR", "level": "error", "desc": ( - f'Amount (2) of component "{component.display_name}" more then maximum (1) in host component list ' - f'for service 
"{service.display_name}"' + f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}" ), }, ) @@ -718,8 +718,8 @@ def test_zero_one_constraint_two_in_hc_fail(self): "code": "COMPONENT_CONSTRAINT_ERROR", "level": "error", "desc": ( - f'Amount (2) of component "{component.display_name}" more then maximum (1) in host component ' - f'list for service "{service.display_name}"' + f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}" ), }, ) @@ -752,21 +752,17 @@ def test_one_two_constraint_fail(self): cluster=self.cluster, ) - for data, err_msg in ( - ( - [], - f'Less then 1 required component "{component.display_name}" (0) ' - f'in host component list for service "{service.display_name}"', - ), - ( - [ - {"hostId": self.host_1.pk, "componentId": component.pk}, - {"hostId": self.host_2.pk, "componentId": component.pk}, - {"hostId": self.host_3.pk, "componentId": component.pk}, - ], - f'Amount (3) of component "{component.display_name}" more then maximum (2) ' - f'in host component list for service "{service.display_name}"', - ), + err_msg = ( + f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}" + ) + for data in ( + [], + [ + {"hostId": self.host_1.pk, "componentId": component.pk}, + {"hostId": self.host_2.pk, "componentId": component.pk}, + {"hostId": self.host_3.pk, "componentId": component.pk}, + ], ): with self.subTest(f"[1,2] constraint, data: {data}"): response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) @@ -815,20 +811,16 @@ def test_one_odd_first_variant_constraint_fail(self): cluster=self.cluster, ) - for data, err_msg in ( - ( - [], - f'Less then 1 required component "{component.display_name}" (0) ' - f'in host component list for service "{service.display_name}"', - ), - 
( - [ - {"hostId": self.host_1.pk, "componentId": component.pk}, - {"hostId": self.host_2.pk, "componentId": component.pk}, - ], - f'Amount (2) of component "{component.display_name}" should be odd (odd) ' - f'in host component list for service "{service.display_name}"', - ), + err_msg = ( + f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}" + ) + for data in ( + [], + [ + {"hostId": self.host_1.pk, "componentId": component.pk}, + {"hostId": self.host_2.pk, "componentId": component.pk}, + ], ): with self.subTest(f"[1,odd] constraint, data: {data}"): response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) @@ -877,21 +869,16 @@ def test_one_odd_second_variant_constraint_fail(self): service=service, cluster=self.cluster, ) - - for data, err_msg in ( - ( - [], - f'Less then 1 required component "{component.display_name}" (0) ' - f'in host component list for service "{service.display_name}"', - ), - ( - [ - {"hostId": self.host_1.pk, "componentId": component.pk}, - {"hostId": self.host_2.pk, "componentId": component.pk}, - ], - f'Amount (2) of component "{component.display_name}" should be odd (odd) ' - f'in host component list for service "{service.display_name}"', - ), + err_msg = ( + f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}" + ) + for data in ( + [], + [ + {"hostId": self.host_1.pk, "componentId": component.pk}, + {"hostId": self.host_2.pk, "componentId": component.pk}, + ], ): with self.subTest(f"[odd] constraint, data: {data}"): response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) @@ -954,8 +941,8 @@ def test_zero_odd_constraint_fail(self): { "code": "COMPONENT_CONSTRAINT_ERROR", "level": "error", - "desc": f'Amount (2) of component "{component.display_name}" should be odd (odd) ' - f'in host component list for service 
"{service.display_name}"', + "desc": f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}", }, ) self.assertEqual(HostComponent.objects.count(), 0) @@ -1003,8 +990,8 @@ def test_one_plus_constraint_fail(self): { "code": "COMPONENT_CONSTRAINT_ERROR", "level": "error", - "desc": f'Less then 1 required component "{component.display_name}" (0) ' - f'in host component list for service "{service.display_name}"', + "desc": f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}", }, ) self.assertEqual(HostComponent.objects.count(), 0) @@ -1060,8 +1047,8 @@ def test_plus_constraint_fail(self): { "code": "COMPONENT_CONSTRAINT_ERROR", "level": "error", - "desc": f'Less then 3 required component "{component.display_name}" (2) ' - f'in host component list for service "{service.display_name}"', + "desc": f'Component "{component.display_name}" of service "{component.service.name}" ' + f"has unsatisfied constraint: {component.prototype.constraint}", }, ) self.assertEqual(HostComponent.objects.count(), 0) diff --git a/python/cm/api.py b/python/cm/api.py index 3bcdbf9278..6c38530a13 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -36,10 +36,6 @@ from cm.errors import AdcmEx, raise_adcm_ex from cm.issue import ( add_concern_to_object, - check_bound_components, - check_component_constraint, - check_hc_requires, - check_service_requires, remove_concern_from_object, update_hierarchy_issues, ) @@ -78,6 +74,10 @@ ) from cm.services.concern.checks import ( cluster_mapping_has_issue, + extract_data_for_requirements_check, + is_bound_to_requirements_unsatisfied, + is_constraint_requirements_unsatisfied, + is_requires_requirements_unsatisfied, object_configuration_has_issue, object_imports_has_issue, ) @@ -562,13 +562,36 @@ def check_hc(cluster: Cluster, hc_in: list[dict]) -> list[tuple[ClusterObject, H 
check_sub_key(hc_in=hc_in) host_comp_list = make_host_comp_list(cluster=cluster, hc_in=hc_in) - check_hc_requires(shc_list=host_comp_list) - check_bound_components(shc_list=host_comp_list) - for service in ClusterObject.objects.filter(cluster=cluster): - check_component_constraint( - cluster=cluster, service_prototype=service.prototype, hc_in=[i for i in host_comp_list if i[0] == service] - ) - check_service_requires(cluster=cluster, proto=service.prototype) + requirements_data = extract_data_for_requirements_check(cluster=cluster, input_mapping=hc_in) + + requires_not_ok, error_message = is_requires_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + existing_objects_map=requirements_data.existing_objects_map, + existing_objects_by_type=requirements_data.objects_map_by_type, + ) + if requires_not_ok and error_message is not None: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) + + bound_not_ok, error_message = is_bound_to_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + existing_objects_map=requirements_data.existing_objects_map, + ) + if bound_not_ok and error_message: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) + + constraint_not_ok, error_message = is_constraint_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + components_map=requirements_data.objects_map_by_type["component"], + ) + if constraint_not_ok and error_message is not None: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) + check_maintenance_mode(cluster=cluster, 
host_comp_list=host_comp_list) return host_comp_list diff --git a/python/cm/issue.py b/python/cm/issue.py index 2b00f6fc24..6656bb2f18 100755 --- a/python/cm/issue.py +++ b/python/cm/issue.py @@ -14,15 +14,13 @@ from typing import Iterable from api_v2.concern.serializers import ConcernSerializer -from core.types import CoreObjectDescriptor +from core.types import CoreObjectDescriptor, PrototypeID from django.conf import settings from django.db.transaction import on_commit from djangorestframework_camel_case.util import camelize -from cm.adcm_config.config import get_prototype_config from cm.adcm_config.utils import proto_ref from cm.converters import orm_object_to_core_type -from cm.data_containers import PrototypeData from cm.errors import AdcmEx from cm.hierarchy import Tree from cm.logger import logger @@ -34,8 +32,6 @@ ConcernCause, ConcernItem, ConcernType, - ConfigLog, - Host, JobLog, ObjectType, Prototype, @@ -45,6 +41,8 @@ from cm.services.concern import create_issue, retrieve_issue from cm.services.concern.checks import ( cluster_mapping_has_issue, + extract_data_for_requirements_check, + is_constraint_requirements_unsatisfied, object_configuration_has_issue, object_has_required_services_issue, object_imports_has_issue, @@ -52,35 +50,6 @@ ) from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, build_concern_reason from cm.status_api import send_concern_creation_event, send_concern_delete_event -from cm.utils import obj_ref - - -def check_config(obj: ADCMEntity) -> bool: - spec, _, _, _ = get_prototype_config(prototype=obj.prototype) - conf, attr = get_obj_config(obj=obj) - for key, value in spec.items(): - if "required" in value: - if value["required"] and key in conf and conf[key] is None: - logger.debug("required config key %s of %s is missing", key, obj_ref(obj=obj)) - return False - else: - if key in attr and "active" in attr[key] and not attr[key]["active"]: - continue - for subkey in value: - if value[subkey]["required"]: - 
if key not in conf: - logger.debug("required config group %s of %s is missing", key, obj_ref(obj=obj)) - return False - if subkey in conf[key]: - if conf[key][subkey] is None: - msg = "required config value for key %s/%s of %s is missing" - logger.debug(msg, key, subkey, obj_ref(obj=obj)) - return False - else: - msg = "required config key %s/%s of %s is missing" - logger.debug(msg, key, subkey, obj_ref(obj=obj)) - return False - return True def check_service_requires(cluster: Cluster, proto: Prototype) -> None: @@ -106,142 +75,11 @@ def check_service_requires(cluster: Cluster, proto: Prototype) -> None: ) -def check_hc_requires(shc_list: list[tuple[ClusterObject, Host, ServiceComponent]]) -> None: - for serv_host_comp in [i for i in shc_list if i[2].prototype.requires or i[0].prototype.requires]: - for require in [*serv_host_comp[2].prototype.requires, *serv_host_comp[0].prototype.requires]: - if require in serv_host_comp[2].prototype.requires: - ref = f'component "{serv_host_comp[2].prototype.name}" of service "{serv_host_comp[0].prototype.name}"' - else: - ref = f'service "{serv_host_comp[0].prototype.name}"' - - req_comp = require.get("component") - - if not ClusterObject.objects.filter(prototype__name=require["service"]).exists() and not req_comp: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", msg=f"No required service \"{require['service']}\" for {ref}" - ) - - if not req_comp: - continue - - if not any( - { # noqa: C419 - (shc[0].prototype.name == require["service"] and shc[2].prototype.name == req_comp) - for shc in shc_list - } - ): - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'No required component "{req_comp}" of service "{require["service"]}" for {ref}', - ) - - -def check_bound_components(shc_list: list[tuple[ClusterObject, Host, ServiceComponent]]) -> None: - for shc in [i for i in shc_list if i[2].prototype.bound_to]: - component_prototype = shc[2].prototype - service_name = component_prototype.bound_to["service"] - 
component_name = component_prototype.bound_to["component"] - - bound_targets_shc = [ - i for i in shc_list if i[0].prototype.name == service_name and i[2].prototype.name == component_name - ] - - if not bound_targets_shc: - bound_target_ref = f'component "{component_name}" of service "{service_name}"' - bound_requester_ref = f'component "{shc[2].display_name}" of service "{shc[0].display_name}"' - msg = f"{bound_target_ref.capitalize()} not in hc for {bound_requester_ref}" - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=msg) - - for target_shc in bound_targets_shc: - if not [i for i in shc_list if i[1] == target_shc[1] and i[2].prototype == component_prototype]: - bound_target_ref = f'component "{shc[2].prototype.name}" of service "{shc[0].prototype.name}"' - bound_requester_ref = ( - f'component "{target_shc[2].prototype.name}" of service "{target_shc[0].prototype.name}"' - ) - msg = f'No {bound_target_ref} on host "{target_shc[1].fqdn}" for {bound_requester_ref}' - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=msg) - - -def get_obj_config(obj: ADCMEntity) -> tuple[dict, dict]: - if obj.config is None: - return {}, {} - - config_log = ConfigLog.obj.get(obj_ref=obj.config, id=obj.config.current) - attr = config_log.attr - if not attr: - attr = {} - - return config_log.config, attr - - -def check_min_required_components(count: int, constraint: int, component_prototype: Prototype, ref: str) -> None: - if count < constraint: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'Less then {constraint} required component "{component_prototype.name}" ({count}) {ref}', - ) - - -def check_max_required_components(count: int, constraint: int, component_prototype: Prototype, ref: str) -> None: - if count > constraint: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'Amount ({count}) of component "{component_prototype.name}" more then maximum ({constraint}) {ref}', - ) - - -def check_components_number_is_odd(count: int, constraint: str, 
component_prototype: Prototype, ref: str) -> None: - if count % 2 == 0: - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'Amount ({count}) of component "{component_prototype.name}" should be odd ({constraint}) {ref}', - ) - - -def check_components_mapping_contraints( - hosts_count: int, - target_mapping_count: int, - service_prototype: Prototype | PrototypeData, - component_prototype: Prototype | PrototypeData, -) -> None: - constraint = component_prototype.constraint - ref = f'in host component list for {service_prototype.type} "{service_prototype.name}"' - - if isinstance(constraint[0], int): - check_min_required_components( - count=target_mapping_count, constraint=constraint[0], component_prototype=component_prototype, ref=ref - ) - if len(constraint) < 2: - check_max_required_components( - count=target_mapping_count, constraint=constraint[0], component_prototype=component_prototype, ref=ref - ) - - if len(constraint) > 1: - if isinstance(constraint[1], int): - check_max_required_components( - count=target_mapping_count, constraint=constraint[1], component_prototype=component_prototype, ref=ref - ) - elif constraint[1] == "odd" and target_mapping_count: - check_components_number_is_odd( - count=target_mapping_count, constraint=constraint[1], component_prototype=component_prototype, ref=ref - ) - - if constraint[0] == "+": - check_min_required_components( - count=target_mapping_count, constraint=hosts_count, component_prototype=component_prototype, ref=ref - ) - elif constraint[0] == "odd": # synonym to [1,odd] - check_min_required_components( - count=target_mapping_count, constraint=1, component_prototype=component_prototype, ref=ref - ) - check_components_number_is_odd( - count=target_mapping_count, constraint=constraint[0], component_prototype=component_prototype, ref=ref - ) - - def check_component_constraint( cluster: Cluster, service_prototype: Prototype, hc_in: list, old_bundle: Bundle | None = None ) -> None: + target_prototypes: 
set[PrototypeID] = set() + for component_prototype in Prototype.objects.filter(parent=service_prototype, type="component"): if old_bundle: try: @@ -259,18 +97,24 @@ def check_component_constraint( except Prototype.DoesNotExist: continue - check_components_mapping_contraints( - hosts_count=Host.objects.filter(cluster=cluster).count(), - target_mapping_count=len( - [ - i - for i in hc_in - if i[0].prototype.name == service_prototype.name and i[2].prototype.name == component_prototype.name - ] - ), - service_prototype=service_prototype, - component_prototype=component_prototype, - ) + target_prototypes.add(component_prototype.pk) + + requirements_data = extract_data_for_requirements_check( + cluster=cluster, + input_mapping=[ + {"host_id": host.id, "component_id": component.id, "service_id": service.id} + for service, host, component in hc_in + ], + target_component_prototypes=target_prototypes, + ) + constraint_not_ok, error_message = is_constraint_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + components_map=requirements_data.objects_map_by_type["component"], + ) + if constraint_not_ok and error_message is not None: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) _issue_check_map = { @@ -410,14 +254,6 @@ def create_lock(owner: ADCMEntity, job: JobLog, custom_name: str = ""): ) -def update_job_in_lock_reason(lock: ConcernItem, job: JobLog) -> ConcernItem: - lock.reason = build_concern_reason( - ConcernMessage.LOCKED_BY_JOB.template, placeholder_objects=PlaceholderObjectsDTO(job=job, target=lock.owner) - ) - lock.save(update_fields=["reason"]) - return lock - - def unlock_affected_objects(task: TaskLog) -> None: task.refresh_from_db() diff --git a/python/cm/services/cluster.py b/python/cm/services/cluster.py index a192e6042b..d8333a5b4a 100644 --- a/python/cm/services/cluster.py +++ 
b/python/cm/services/cluster.py @@ -21,7 +21,7 @@ MaintenanceModeOfObjects, ObjectMaintenanceModeState, ) -from core.types import ADCMCoreType, ClusterID, CoreObjectDescriptor, HostID, ShortObjectInfo +from core.types import ADCMCoreType, ClusterID, CoreObjectDescriptor, HostID, MappingDict, ShortObjectInfo from django.db.transaction import atomic from rbac.models import re_apply_object_policy @@ -122,8 +122,10 @@ def perform_host_to_cluster_map( return hosts -def retrieve_clusters_topology(cluster_ids: Iterable[ClusterID]) -> Generator[ClusterTopology, None, None]: - return build_clusters_topology(cluster_ids=cluster_ids, db=ClusterDB) +def retrieve_clusters_topology( + cluster_ids: Iterable[ClusterID], input_mapping: dict[ClusterID, list[MappingDict]] | None = None +) -> Generator[ClusterTopology, None, None]: + return build_clusters_topology(cluster_ids=cluster_ids, db=ClusterDB, input_mapping=input_mapping) def retrieve_related_cluster_topology(orm_object: Cluster | ClusterObject | ServiceComponent | Host) -> ClusterTopology: diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 9a8ae939ba..be7ba3b6ed 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -10,13 +10,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import deque +from collections import defaultdict, deque from functools import partial from operator import attrgetter from typing import Callable, Iterable, Literal, NamedTuple, TypeAlias -from core.cluster.types import ServiceTopology -from core.types import ClusterID, ComponentID, ConfigID, HostID, ObjectID, PrototypeID, ServiceID +from core.cluster.types import ClusterTopology, ServiceTopology +from core.types import ClusterID, ComponentID, ConfigID, HostID, MappingDict, ObjectID, PrototypeID, ServiceID from django.db.models import Q from typing_extensions import Self @@ -64,7 +64,7 @@ def from_db_repr(cls, constraint: ConstraintDBFormat) -> Self: case [int(exact)]: checks = (partial(check_exact, argument=exact),) case [int(min_), "odd"]: - checks = (partial(check_equal_or_greater, argument=min_), check_is_odd) + checks = (partial(check_equal_or_greater, argument=min_), partial(check_is_odd, allow_zero=min_ == 0)) case [int(min_), "+"]: checks = (partial(check_equal_or_greater, argument=min_),) case [int(min_), int(max_)]: @@ -81,11 +81,25 @@ def is_met_for(self, mapped_hosts: int, hosts_in_cluster: int) -> bool: class ServiceExternalRequirement(NamedTuple): name: str + def __str__(self): + return f'service "{self.name}"' + class ComponentExternalRequirement(NamedTuple): name: str service_name: str + def __str__(self): + return f'component "{self.name}" of service "{self.service_name}"' + + +class ServiceRequirements(NamedTuple): + requires: tuple[ServiceExternalRequirement | ComponentExternalRequirement, ...] 
+ + @property + def is_requires_check_required(self) -> bool: + return len(self.requires) > 0 + class ComponentMappingRequirements(NamedTuple): constraint: Constraint @@ -105,6 +119,52 @@ def is_bound_to_check_required(self) -> bool: return self.bound_to is not None +class RequirementsCheckDTO(NamedTuple): + topology: ClusterTopology + component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]] + prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements] + existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID] + + @property + def prototype_requirements_only_component_requires( + self, + ) -> dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements]: + res = {} + + for prototype_id, requirements in self.prototype_requirements.items(): + new_requires = tuple(req for req in requirements.requires if isinstance(req, ComponentExternalRequirement)) + + if isinstance(requirements, ComponentMappingRequirements): + new_requirements = ComponentMappingRequirements( + constraint=requirements.constraint, requires=new_requires, bound_to=requirements.bound_to + ) + elif isinstance(requirements, ServiceRequirements): + new_requirements = ServiceRequirements(requires=new_requires) + else: + raise NotImplementedError(f"Unexpected requirements type: {type(requirements)}") + + res[prototype_id] = new_requirements + + return res + + @property + def objects_map_by_type( + self, + ) -> dict[ + Literal["service", "component"], + dict[ServiceID | ComponentID, ServiceExternalRequirement | ComponentExternalRequirement], + ]: + existing_objects = defaultdict(dict) + + for entity_reqs, entity_id in self.existing_objects_map.items(): + if isinstance(entity_reqs, ComponentExternalRequirement): + existing_objects["component"][entity_id] = entity_reqs + elif isinstance(entity_reqs, ServiceExternalRequirement): + existing_objects["service"][entity_id] = entity_reqs + + 
return existing_objects + + def object_configuration_has_issue(target: ObjectWithConfig) -> HasIssue: config_spec = next(iter(retrieve_flat_spec_for_objects(prototypes=(target.prototype_id,)).values()), None) if not config_spec: @@ -230,104 +290,247 @@ def cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: - bound_to """ + requirements_data = extract_data_for_requirements_check(cluster=cluster) + + bound_not_ok, _ = is_bound_to_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + existing_objects_map=requirements_data.existing_objects_map, + ) + if bound_not_ok: + return True + + requires_not_ok, _ = is_requires_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements_only_component_requires, + existing_objects_map=requirements_data.existing_objects_map, + existing_objects_by_type=requirements_data.objects_map_by_type, + ) + if requires_not_ok: + return True + + constraint_not_ok, _ = is_constraint_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + components_map=requirements_data.objects_map_by_type["component"], + ) + if constraint_not_ok: + return True + + return False + + +def extract_data_for_requirements_check( + cluster: Cluster, + input_mapping: list[MappingDict] | None = None, + target_component_prototypes: set[PrototypeID] | None = None, +) -> RequirementsCheckDTO: bundle_id = cluster.prototype.bundle_id + prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements] = {} - # extract requirements - requirements_from_components: dict[PrototypeID, ComponentMappingRequirements] = {} + 
query = {"bundle_id": bundle_id, "type__in": {ObjectType.COMPONENT, ObjectType.SERVICE}} + if target_component_prototypes is not None: + query.update({"pk__in": target_component_prototypes}) - for prototype_id, constraint, requires, bound_to in Prototype.objects.values_list( - "id", "constraint", "requires", "bound_to" - ).filter(bundle_id=bundle_id, type=ObjectType.COMPONENT): + for prototype_id, prototype_type, constraint, requires, bound_to in Prototype.objects.values_list( + "id", "type", "constraint", "requires", "bound_to" + ).filter(**query): prepared_requires = deque() for requirement in requires: service_name = requirement["service"] if component_name := requirement.get("component"): - # "service" requirements aren't checked for mapping issue prepared_requires.append(ComponentExternalRequirement(name=component_name, service_name=service_name)) - - requirements_from_components[prototype_id] = ComponentMappingRequirements( - constraint=Constraint.from_db_repr(constraint), - requires=tuple(prepared_requires), - bound_to=ComponentExternalRequirement(name=bound_to["component"], service_name=bound_to["service"]) - if bound_to - else None, - ) + else: + prepared_requires.append(ServiceExternalRequirement(name=service_name)) + + if prototype_type == ObjectType.COMPONENT: + prototype_requirements[prototype_id] = ComponentMappingRequirements( + constraint=Constraint.from_db_repr(constraint), + requires=tuple(prepared_requires), + bound_to=ComponentExternalRequirement(name=bound_to["component"], service_name=bound_to["service"]) + if bound_to + else None, + ) + elif prototype_type == ObjectType.SERVICE: + prototype_requirements[prototype_id] = ServiceRequirements(requires=tuple(prepared_requires)) + else: + raise NotImplementedError(f"Unexpected prototype type: {prototype_type}") # prepare data for check - topology = next(retrieve_clusters_topology((cluster.id,))) + input_mapping = {cluster.id: input_mapping} if input_mapping else None + topology = 
next(retrieve_clusters_topology(cluster_ids=(cluster.id,), input_mapping=input_mapping)) + + query = {"cluster": cluster} + if target_component_prototypes is not None: + query.update({"servicecomponent__prototype_id__in": target_component_prototypes}) - component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID]] = {} + component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]] = {} existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID] = { ServiceExternalRequirement(name=service_name): service_id - for service_id, service_name in ClusterObject.objects.values_list("id", "prototype__name").filter( - cluster=cluster - ) + for service_id, service_name in ClusterObject.objects.values_list("id", "prototype__name") + .filter(**query) + .distinct() } - for component_id, prototype_id, service_id, component_name, service_name in ServiceComponent.objects.values_list( - "id", "prototype_id", "service_id", "prototype__name", "service__prototype__name" - ).filter(id__in=topology.component_ids): - component_prototype_map[component_id] = (prototype_id, service_id) + query = {"id__in": topology.component_ids} + if target_component_prototypes is not None: + query.update({"prototype_id__in": target_component_prototypes}) + + for ( + component_id, + prototype_id, + service_id, + service_prototype_id, + component_name, + service_name, + ) in ServiceComponent.objects.values_list( + "id", "prototype_id", "service_id", "service__prototype_id", "prototype__name", "service__prototype__name" + ).filter(**query): + component_prototype_map[component_id] = (prototype_id, service_id, service_prototype_id) existing_objects_map[ ComponentExternalRequirement(name=component_name, service_name=service_name) ] = component_id - hosts_amount = len(topology.hosts) + return RequirementsCheckDTO( + topology=topology, + component_prototype_map=component_prototype_map, + 
prototype_requirements=prototype_requirements, + existing_objects_map=existing_objects_map, + ) - existing_objects = set(existing_objects_map.keys()) - # run checks +def is_bound_to_requirements_unsatisfied( + topology: ClusterTopology, + component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]], + prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements], + existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID], +) -> tuple[bool, str | None]: + existing_components: dict[ComponentID, ComponentExternalRequirement] = {} + for entity_reqs, entity_id in existing_objects_map.items(): + if isinstance(entity_reqs, ComponentExternalRequirement): + existing_components[entity_id] = entity_reqs + + for component_id, (prototype_id, service_id, _) in component_prototype_map.items(): + requirements = prototype_requirements[prototype_id] + + # only mapped components should be checked for bound_to + if ( + not requirements.is_bound_to_check_required + or not topology.services[service_id].components[component_id].hosts + ): + continue - for component_id, (prototype_id, service_id) in component_prototype_map.items(): - requirements = requirements_from_components[prototype_id] + bound_requester_reference = str(existing_components[component_id]) + error_message = f"{str(requirements.bound_to).capitalize()} not in hc for {bound_requester_reference}" - if requirements.is_constraint_check_required and not requirements.constraint.is_met_for( - mapped_hosts=len(topology.services[service_id].components[component_id].hosts), - hosts_in_cluster=hosts_amount, - ): - return True + bound_component_id = existing_objects_map.get(requirements.bound_to) + if not bound_component_id: + return True, error_message + + service_id_of_bound_component = existing_objects_map.get( + ServiceExternalRequirement(name=requirements.bound_to.service_name) + ) + if not 
service_id_of_bound_component: + return True, error_message + + bound_service_topology: ServiceTopology | None = topology.services.get(service_id_of_bound_component) + if not bound_service_topology: + return True, error_message + + error_message = f"No {str(requirements.bound_to).capitalize()} on host for {bound_requester_reference}" - # only mapped components should be checked for requires and bound_to + bound_component_hosts: set[HostID] = set(bound_service_topology.components[bound_component_id].hosts) + current_component_hosts: set[HostID] = set(topology.services[service_id].components[component_id].hosts) + + if bound_component_hosts != current_component_hosts: + return True, error_message + + return False, None + + +def is_requires_requirements_unsatisfied( + topology: ClusterTopology, + component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]], + prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements], + existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID], + existing_objects_by_type: dict[ + Literal["service", "component"], + dict[ServiceID | ComponentID, ServiceExternalRequirement | ComponentExternalRequirement], + ], +) -> tuple[bool, str | None]: + seen_service_prototypes: set[PrototypeID] = set() + + for component_id, (prototype_id, service_id, service_prototype_id) in component_prototype_map.items(): + # only mapped components should be checked for requires if not topology.services[service_id].components[component_id].hosts: continue - if requirements.is_requires_check_required: - # all required components should be added - if not existing_objects.issuperset(requirements.requires): - return True - - for required_component in requirements.requires: - required_component_id = existing_objects_map[required_component] - required_service_id = existing_objects_map[ - ServiceExternalRequirement(name=required_component.service_name) - ] - 
# if required component is unmapped - that's mapping issue - if not topology.services[required_service_id].components[required_component_id].hosts: - return True - - if requirements.is_bound_to_check_required: - bound_component_id = existing_objects_map.get(requirements.bound_to) - if not bound_component_id: - return True - - service_id_of_bound_component = existing_objects_map.get( - ServiceExternalRequirement(name=requirements.bound_to.service_name) - ) - if not service_id_of_bound_component: - return True - - bound_service_topology: ServiceTopology | None = topology.services.get(service_id_of_bound_component) - if not bound_service_topology: - return True + component_requirements = prototype_requirements[prototype_id] + service_requirements = None + if service_prototype_id not in seen_service_prototypes: + service_requirements = prototype_requirements[service_prototype_id] + seen_service_prototypes.add(service_prototype_id) - bound_component_hosts: set[HostID] = set(bound_service_topology.components[bound_component_id].hosts) - current_component_hosts: set[HostID] = set(topology.services[service_id].components[component_id].hosts) + component_requires = ( + component_requirements.requires if component_requirements.is_requires_check_required else [] + ) + service_requires = ( + service_requirements.requires + if service_requirements is not None and service_requirements.is_requires_check_required + else [] + ) + all_requires = [ + *zip(component_requires, [existing_objects_by_type["component"][component_id]] * len(component_requires)), + *zip(service_requires, [existing_objects_by_type["service"][service_id]] * len(service_requires)), + ] + for required_entity, owner in all_requires: + try: + if isinstance(required_entity, ComponentExternalRequirement): + required_component_id = existing_objects_map[required_entity] + required_service_id = existing_objects_map[ + ServiceExternalRequirement(name=required_entity.service_name) + ] + elif isinstance(required_entity, 
ServiceExternalRequirement): + required_component_id = None + required_service_id = existing_objects_map[required_entity] + else: + raise NotImplementedError(f"Unexpected required_entity type: {type(required_entity)}") + except KeyError: + return True, f"No required {required_entity} for {owner}" + + if required_component_id is None: + continue + + if not topology.services[required_service_id].components[required_component_id].hosts: + return True, f"No required {required_entity} for {owner}" + + return False, None + + +def is_constraint_requirements_unsatisfied( + topology: ClusterTopology, + component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]], + prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements], + components_map: dict[ComponentID, ComponentExternalRequirement], +) -> tuple[bool, str | None]: + for component_id, (prototype_id, service_id, _) in component_prototype_map.items(): + requirements = prototype_requirements[prototype_id] - if bound_component_hosts != current_component_hosts: - return True + if requirements.is_constraint_check_required and not requirements.constraint.is_met_for( + mapped_hosts=len(topology.services[service_id].components[component_id].hosts), + hosts_in_cluster=len(topology.hosts), + ): + return ( + True, + f"{str(components_map[component_id]).capitalize()} " + f"has unsatisfied constraint: {requirements.constraint.internal}", + ) - return False + return False, None # constraint check functions @@ -345,7 +548,10 @@ def check_exact(mapped_hosts: int, _: int, argument: int): return mapped_hosts == argument -def check_is_odd(mapped_hosts: int, _: int): +def check_is_odd(mapped_hosts: int, _: int, allow_zero: bool = False): + if mapped_hosts == 0 and allow_zero: + return True + return mapped_hosts % 2 == 1 diff --git a/python/cm/services/job/checks.py b/python/cm/services/job/checks.py index 472ca4911f..facf4e06a1 100644 --- a/python/cm/services/job/checks.py +++ 
b/python/cm/services/job/checks.py @@ -14,8 +14,13 @@ from cm.api import check_hc, check_maintenance_mode, check_sub_key, get_hc, make_host_comp_list from cm.errors import AdcmEx -from cm.issue import check_bound_components, check_component_constraint, check_hc_requires, check_service_requires +from cm.issue import check_component_constraint, check_service_requires from cm.models import Action, Cluster, ClusterObject, ConcernType, Host, Prototype, ServiceComponent +from cm.services.concern.checks import ( + extract_data_for_requirements_check, + is_bound_to_requirements_unsatisfied, + is_requires_requirements_unsatisfied, +) from cm.services.job._utils import cook_delta, get_old_hc from cm.services.job.types import HcAclAction @@ -71,8 +76,31 @@ def check_constraints_for_upgrade(cluster, upgrade, host_comp_list): except Prototype.DoesNotExist: pass - check_hc_requires(shc_list=host_comp_list) - check_bound_components(shc_list=host_comp_list) + requirements_data = extract_data_for_requirements_check( + cluster=cluster, + input_mapping=[ + {"host_id": host.id, "component_id": component.id, "service_id": service.id} + for service, host, component in host_comp_list + ], + ) + requires_not_ok, error_message = is_requires_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + existing_objects_map=requirements_data.existing_objects_map, + existing_objects_by_type=requirements_data.objects_map_by_type, + ) + if requires_not_ok and error_message is not None: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) + + bound_not_ok, error_msg = is_bound_to_requirements_unsatisfied( + topology=requirements_data.topology, + component_prototype_map=requirements_data.component_prototype_map, + prototype_requirements=requirements_data.prototype_requirements, + existing_objects_map=requirements_data.existing_objects_map, + 
) + if bound_not_ok: + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_msg) check_maintenance_mode(cluster=cluster, host_comp_list=host_comp_list) except AdcmEx as e: if e.code == "COMPONENT_CONSTRAINT_ERROR": diff --git a/python/cm/services/types.py b/python/cm/services/types.py deleted file mode 100644 index 3199c69a63..0000000000 --- a/python/cm/services/types.py +++ /dev/null @@ -1,17 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import TypeAlias - -from cm.models import ADCM, Cluster, ClusterObject, Host, HostProvider, ServiceComponent - -ADCMEntityType: TypeAlias = ADCM | Cluster | ClusterObject | ServiceComponent | HostProvider | Host diff --git a/python/cm/tests/utils.py b/python/cm/tests/utils.py index 5671e20cb0..e9b28dc1b2 100644 --- a/python/cm/tests/utils.py +++ b/python/cm/tests/utils.py @@ -98,7 +98,7 @@ def gen_service( ) -> ClusterObject: """Generate service of specified cluster and prototype""" if not prototype: - bundle = bundle or gen_bundle() + bundle = bundle or cluster.prototype.bundle or gen_bundle() prototype = gen_prototype(bundle, "service") return ClusterObject.objects.create( cluster=cluster, @@ -115,7 +115,7 @@ def gen_component( ) -> ServiceComponent: """Generate service component for specified service and prototype""" if not prototype: - bundle = bundle or gen_bundle() + bundle = bundle or service.prototype.bundle or gen_bundle() prototype = gen_prototype(bundle, "component") return 
ServiceComponent.objects.create( cluster=service.cluster, @@ -139,7 +139,7 @@ def gen_provider(name: str | None = None, bundle=None, prototype=None) -> HostPr def gen_host(provider, cluster=None, fqdn: str | None = None, bundle=None, prototype=None) -> Host: """Generate host for specified cluster, provider, and prototype""" if not prototype: - bundle = bundle or gen_bundle() + bundle = bundle or provider.prototype.bundle or gen_bundle() prototype = gen_prototype(bundle, "host") return Host.objects.create( fqdn=fqdn or gen_name("host-"), diff --git a/python/core/cluster/operations.py b/python/core/cluster/operations.py index 3297d5275f..6d67ce2756 100644 --- a/python/core/cluster/operations.py +++ b/python/core/cluster/operations.py @@ -26,7 +26,7 @@ ObjectMaintenanceModeState, ServiceTopology, ) -from core.types import ClusterID, ComponentID, HostID, ShortObjectInfo +from core.types import ClusterID, ComponentID, HostID, MappingDict, ShortObjectInfo # !===== Cluster Topology =====! @@ -50,22 +50,31 @@ def get_host_component_entries( def build_clusters_topology( - cluster_ids: Iterable[ClusterID], db: ClusterTopologyDBProtocol + cluster_ids: Iterable[ClusterID], + db: ClusterTopologyDBProtocol, + input_mapping: dict[ClusterID, list[MappingDict]] | None = None, ) -> Generator[ClusterTopology, None, None]: + input_mapping = {} if input_mapping is None else input_mapping + hosts_in_clusters = { cluster_id: {host.id: host for host in hosts} for cluster_id, hosts in db.get_clusters_hosts(cluster_ids=cluster_ids).items() } services_in_clusters = db.get_clusters_services_with_components(cluster_ids=cluster_ids) + # either existing mapping or input mapping is used to collect `hosts_on_components` hosts_on_components: dict[ClusterID, dict[ComponentID, set[HostID]]] = { cluster_id: defaultdict(set) for cluster_id in cluster_ids } - if hosts_in_clusters and services_in_clusters: + if hosts_in_clusters and services_in_clusters and not input_mapping: for cluster_id, entries in 
db.get_host_component_entries(cluster_ids=cluster_ids).items(): for entry in entries: hosts_on_components[cluster_id][entry.component_id].add(entry.host_id) + for cluster_id, input_mapping_list in input_mapping.items(): + for input_mapping_entry in input_mapping_list: + hosts_on_components[cluster_id][input_mapping_entry["component_id"]].add(input_mapping_entry["host_id"]) + return ( ClusterTopology( cluster_id=cluster_id, diff --git a/python/core/types.py b/python/core/types.py index c6eb58bf13..e568ba28d1 100644 --- a/python/core/types.py +++ b/python/core/types.py @@ -12,7 +12,7 @@ from dataclasses import dataclass from enum import Enum -from typing import NamedTuple, TypeAlias +from typing import Literal, NamedTuple, TypeAlias ObjectID: TypeAlias = int ClusterID: TypeAlias = ObjectID @@ -33,6 +33,8 @@ ServiceName: TypeAlias = str ComponentName: TypeAlias = str +MappingDict: TypeAlias = dict[Literal["host_id", "component_id", "service_id"], HostID | ComponentID | ServiceID] + class ADCMCoreError(Exception): ... 
From 2717b0215683cbddb30dade76696f31829470011 Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Tue, 27 Aug 2024 13:37:33 +0500 Subject: [PATCH 44/98] ADCM-5060 Linters fix post-merge --- python/api_v2/host/utils.py | 2 +- python/cm/api.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/python/api_v2/host/utils.py b/python/api_v2/host/utils.py index 2ea8eb6fb1..9da2d89f3c 100644 --- a/python/api_v2/host/utils.py +++ b/python/api_v2/host/utils.py @@ -20,8 +20,8 @@ ) from cm.logger import logger from cm.models import Cluster, Host, HostProvider, ObjectType, Prototype -from cm.services.concern.locks import get_lock_on_object from cm.services.concern import retrieve_issue +from cm.services.concern.locks import get_lock_on_object from cm.services.maintenance_mode import get_maintenance_mode_response from cm.services.status.notify import reset_hc_map from core.types import ADCMCoreType, CoreObjectDescriptor diff --git a/python/cm/api.py b/python/cm/api.py index c05e77ce42..dfd6fcb7b1 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -82,7 +82,6 @@ ) from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag -from cm.services.concern.flags import BuiltInFlag, raise_flag, update_hierarchy from cm.services.concern.locks import get_lock_on_object from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import ( From c80a5795d3fb6d567352269547f9da06c22a14b7 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Tue, 27 Aug 2024 08:49:08 +0000 Subject: [PATCH 45/98] ADCM-5896: remove logrotate --- Dockerfile | 1 - conf/adcm/config.yaml | 28 +------ os/etc/logrotate.conf | 26 ------ os/etc/logrotate.d/runstatus | 13 --- .../tests/bundles/adcm_configs/config.yaml | 26 ------ .../responses/config_schemas/for_adcm.json | 79 ------------------- python/api_v2/tests/test_audit/test_adcm.py | 2 - 
python/api_v2/tests/test_config.py | 3 - python/audit/tests/test_logrotate.py | 2 - python/cm/management/commands/logrotate.py | 69 +--------------- .../action_configs/cluster.json.j2 | 1 - .../action_configs/cluster_on_host.json.j2 | 1 - .../action_configs/component.json.j2 | 1 - .../action_configs/component_on_host.json.j2 | 1 - .../action_configs/host.json.j2 | 1 - .../action_configs/hostprovider.json.j2 | 1 - .../job_bundle_relative_cluster.json.j2 | 1 - .../job_bundle_relative_service.json.j2 | 1 - .../job_proto_relative_cluster.json.j2 | 1 - .../job_proto_relative_service.json.j2 | 1 - .../action_configs/service.json.j2 | 1 - .../action_configs/service_on_host.json.j2 | 1 - ...task_mixed_bundle_relative_cluster.json.j2 | 1 - ...task_mixed_bundle_relative_service.json.j2 | 1 - .../task_mixed_proto_relative_cluster.json.j2 | 1 - .../task_mixed_proto_relative_service.json.j2 | 1 - 26 files changed, 3 insertions(+), 262 deletions(-) delete mode 100644 os/etc/logrotate.conf delete mode 100644 os/etc/logrotate.d/runstatus diff --git a/Dockerfile b/Dockerfile index 1545b58466..8e51df8d3b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,7 +13,6 @@ RUN apk update && \ libffi \ libstdc++ \ libxslt \ - logrotate \ musl-dev \ nginx \ openldap-dev \ diff --git a/conf/adcm/config.yaml b/conf/adcm/config.yaml index a41b5948dd..fa21ae0828 100755 --- a/conf/adcm/config.yaml +++ b/conf/adcm/config.yaml @@ -2,7 +2,7 @@ type: adcm name: ADCM - version: 3.5 + version: 3.6 actions: run_ldap_sync: @@ -82,32 +82,6 @@ no_confirm: true ui_options: invisible: true - - name: "logrotate" - display_name: "Nginx Server Logrotate" - type: "group" - activatable: true - ui_options: - invisible: true - subs: - - name: size - display_name: "Max file size" - description: | - Specifies the allowed size the log file can reach before it is archived - type: string - default: 10M - - name: max_history - display_name: "Max files history" - description: | - Controls the maximum number of archive files 
to keep - type: integer - default: 10 - min: 1 - - name: compress - display_name: "Enable compression" - description: | - Compress the rotated files - type: boolean - default: false - name: "audit_data_retention" display_name: "Data retention policy" type: "group" diff --git a/os/etc/logrotate.conf b/os/etc/logrotate.conf deleted file mode 100644 index 05741a8624..0000000000 --- a/os/etc/logrotate.conf +++ /dev/null @@ -1,26 +0,0 @@ -# see "man logrotate" for details -# rotate log files weekly -weekly - -# keep 4 weeks worth of backlogs -rotate 4 - -# create new (empty) log files after rotating old ones -create - -# use date as a suffix of the rotated file -dateext - -# exclude alpine files -tabooext + .apk-new - -# uncomment this if you want your log files compressed -compress - -# main log file -/dev/null {} - -# apk packages drop log rotation information into this directory -include /etc/logrotate.d - -# system-specific logs may be also be configured here. diff --git a/os/etc/logrotate.d/runstatus b/os/etc/logrotate.d/runstatus deleted file mode 100644 index 94bed0a4f7..0000000000 --- a/os/etc/logrotate.d/runstatus +++ /dev/null @@ -1,13 +0,0 @@ -/adcm/data/log/status.log { - su root root - size 50M - create 0644 - missingok - sharedscripts - compress - delaycompress - rotate 10 - postrotate - killall -USR1 runstatus - endscript -} diff --git a/python/api_v2/tests/bundles/adcm_configs/config.yaml b/python/api_v2/tests/bundles/adcm_configs/config.yaml index 3f44996a0c..052c1e7921 100644 --- a/python/api_v2/tests/bundles/adcm_configs/config.yaml +++ b/python/api_v2/tests/bundles/adcm_configs/config.yaml @@ -100,32 +100,6 @@ default: 5 min: 1 max: 100 - - name: "logrotate" - display_name: "Nginx Server Logrotate" - type: "group" - activatable: true - ui_options: - invisible: true - subs: - - name: size - display_name: "Max file size" - description: | - Specifies the allowed size the log file can reach before it is archived - type: string - default: 10M - - name: 
max_history - display_name: "Max files history" - description: | - Controls the maximum number of archive files to keep - type: integer - default: 10 - min: 1 - - name: compress - display_name: "Enable compression" - description: | - Compress the rotated files - type: boolean - default: false - name: "config_rotation" display_name: "Configuration rotation" description: | diff --git a/python/api_v2/tests/files/responses/config_schemas/for_adcm.json b/python/api_v2/tests/files/responses/config_schemas/for_adcm.json index 33dc28e2d7..f8a2522156 100644 --- a/python/api_v2/tests/files/responses/config_schemas/for_adcm.json +++ b/python/api_v2/tests/files/responses/config_schemas/for_adcm.json @@ -265,84 +265,6 @@ "secret" ] }, - "logrotate": { - "title": "Nginx Server Logrotate", - "type": "object", - "description": "", - "default": {}, - "readOnly": false, - "adcmMeta": { - "isAdvanced": false, - "isInvisible": true, - "activation": { - "isAllowChange": true - }, - "synchronization": null, - "isSecret": false, - "stringExtra": null, - "enumExtra": null - }, - "additionalProperties": false, - "properties": { - "size": { - "title": "Max file size", - "type": "string", - "description": "Specifies the allowed size the log file can reach before it is archived\n", - "default": "10M", - "readOnly": false, - "adcmMeta": { - "isAdvanced": false, - "isInvisible": false, - "activation": null, - "synchronization": null, - "isSecret": false, - "stringExtra": { - "isMultiline": false - }, - "enumExtra": null - }, - "minLength": 1 - }, - "max_history": { - "title": "Max files history", - "type": "integer", - "description": "Controls the maximum number of archive files to keep\n", - "default": 10, - "readOnly": false, - "adcmMeta": { - "isAdvanced": false, - "isInvisible": false, - "activation": null, - "synchronization": null, - "isSecret": false, - "stringExtra": null, - "enumExtra": null - }, - "minimum": 1 - }, - "compress": { - "title": "Enable compression", - "type": "boolean", 
- "description": "Compress the rotated files\n", - "default": false, - "readOnly": false, - "adcmMeta": { - "isAdvanced": false, - "isInvisible": false, - "activation": null, - "synchronization": null, - "isSecret": false, - "stringExtra": null, - "enumExtra": null - } - } - }, - "required": [ - "size", - "max_history", - "compress" - ] - }, "audit_data_retention": { "title": "Data retention policy", "type": "object", @@ -919,7 +841,6 @@ "statistics_collection", "google_oauth", "yandex_oauth", - "logrotate", "audit_data_retention", "ldap_integration", "auth_policy" diff --git a/python/api_v2/tests/test_audit/test_adcm.py b/python/api_v2/tests/test_audit/test_adcm.py index d7e4ffc50e..99369535aa 100644 --- a/python/api_v2/tests/test_audit/test_adcm.py +++ b/python/api_v2/tests/test_audit/test_adcm.py @@ -41,7 +41,6 @@ def setUp(self) -> None: "global": {"adcm_url": "http://127.0.0.1:8000", "verification_public_key": "\n"}, "google_oauth": {"client_id": None, "secret": None}, "yandex_oauth": {"client_id": None, "secret": None}, - "logrotate": {"size": "10M", "max_history": 10, "compress": False}, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, @@ -74,7 +73,6 @@ def setUp(self) -> None: }, }, "adcmMeta": { - "/logrotate": {"isActive": False}, "/ldap_integration": {"isActive": False}, "/statistics_collection": {"isActive": False}, }, diff --git a/python/api_v2/tests/test_config.py b/python/api_v2/tests/test_config.py index 2188501f03..34cb5a2067 100644 --- a/python/api_v2/tests/test_config.py +++ b/python/api_v2/tests/test_config.py @@ -2303,7 +2303,6 @@ def test_retrieve_success(self): self.assertDictEqual( data["adcmMeta"], { - "/logrotate": {"isActive": False}, "/ldap_integration": {"isActive": False}, "/statistics_collection": {"isActive": True}, }, @@ -2315,7 +2314,6 @@ def test_create_success(self): "global": {"adcm_url": "http://127.0.0.1:8000", "verification_public_key": "\n"}, "google_oauth": {"client_id": None, "secret": None}, 
"yandex_oauth": {"client_id": None, "secret": None}, - "logrotate": {"size": "10M", "max_history": 10, "compress": False}, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, @@ -2348,7 +2346,6 @@ def test_create_success(self): }, }, "adcmMeta": { - "/logrotate": {"isActive": False}, "/ldap_integration": {"isActive": False}, "/statistics_collection": {"isActive": False}, }, diff --git a/python/audit/tests/test_logrotate.py b/python/audit/tests/test_logrotate.py index 9111106de7..e6579993c2 100644 --- a/python/audit/tests/test_logrotate.py +++ b/python/audit/tests/test_logrotate.py @@ -41,10 +41,8 @@ def setUp(self) -> None: config.update( { "audit_data_retention": {"log_rotation_on_fs": 1, "log_rotation_in_db": 1, "config_rotation_in_db": 1}, - "logrotate": {"size": "10M", "max_history": 10, "compress": False}, } ) - attr.update({"logrotate": {"active": False}}) new_config_log = ConfigLog.objects.create(config=config, attr=attr, obj_ref=adcm.config) adcm.config.previous = current_config_log.pk adcm.config.current = new_config_log.pk diff --git a/python/cm/management/commands/logrotate.py b/python/cm/management/commands/logrotate.py index a9a8d2c1e0..36af2d285d 100644 --- a/python/cm/management/commands/logrotate.py +++ b/python/cm/management/commands/logrotate.py @@ -12,8 +12,6 @@ from datetime import datetime, timedelta from enum import Enum -from pathlib import Path -from subprocess import STDOUT, CalledProcessError, check_output import os import shutil import logging @@ -42,37 +40,15 @@ logger = logging.getLogger("background_tasks") -LOGROTATE_CONF_FILE_TEMPLATE = """ -/adcm/data/log/nginx/*.log {{ - su root root - size {size} - missingok - nomail - {no_compress}compress - {no_compress}delaycompress - rotate {num_rotations} - sharedscripts - postrotate - kill -USR1 `cat /run/nginx/nginx.pid` - endscript -}} -""" - - class TargetType(Enum): ALL = "all" JOB = "job" CONFIG = "config" - NGINX = "nginx" class Command(BaseCommand): help = 
"Delete / rotate log files, db records, `run` directories" - __nginx_logrotate_conf = "/etc/logrotate.d/nginx" - __logrotate_cmd = f"logrotate {__nginx_logrotate_conf}" - __logrotate_cmd_debug = f"{__logrotate_cmd} -v" - def add_arguments(self, parser): parser.add_argument( "--target", @@ -85,13 +61,11 @@ def add_arguments(self, parser): def handle(self, *args, **options): # noqa: ARG002 __target_method_map = { TargetType.ALL.value: [ - self.__run_nginx_log_rotation, self.__run_joblog_rotation, self.__run_configlog_rotation, ], TargetType.JOB.value: [self.__run_joblog_rotation], TargetType.CONFIG.value: [self.__run_configlog_rotation], - TargetType.NGINX.value: [self.__run_nginx_log_rotation], } self.verbose = not options["disable_logs"] @@ -101,54 +75,15 @@ def handle(self, *args, **options): # noqa: ARG002 for func in __target_method_map[target]: func() - def __execute_cmd(self, cmd): - self.__log(f"executing cmd: `{cmd}`", "info") - try: - out = check_output(cmd, shell=True, stderr=STDOUT) # noqa: S602 - out = out.decode(settings.ENCODING_UTF_8).strip("\n") - if out: - self.__log(out, "debug") - except CalledProcessError as e: - err_msg = e.stdout.decode(settings.ENCODING_UTF_8).strip("\n") - msg = f"Error! 
cmd: `{cmd}` return code: `{e.returncode}` msg: `{err_msg}`" - self.__log(msg, "exception") - def __get_logrotate_config(self): adcm_object = ADCM.objects.first() - current_configlog = ConfigLog.objects.get(obj_ref=adcm_object.config, id=adcm_object.config.current) - adcm_conf = current_configlog.config + current_config = ConfigLog.objects.get(obj_ref=adcm_object.config, id=adcm_object.config.current).config logrotate_config = { - "logrotate": { - "active": current_configlog.attr["logrotate"]["active"], - "nginx": adcm_conf["logrotate"], - }, - "config": adcm_conf["audit_data_retention"], + "config": current_config["audit_data_retention"], } self.__log(f"Got rotation config: {logrotate_config}") return logrotate_config - def __generate_logrotate_conf_file(self): - conf_file_args = { - "size": f"{self.config['logrotate']['nginx']['size']}", - "no_compress": "" if self.config["logrotate"]["nginx"]["compress"] else "#", - "num_rotations": self.config["logrotate"]["nginx"]["max_history"], - } - with Path(self.__nginx_logrotate_conf).open("w", encoding=settings.ENCODING_UTF_8) as conf_file: - conf_file.write(LOGROTATE_CONF_FILE_TEMPLATE.format(**conf_file_args)) - self.__log(f"conf file `{self.__nginx_logrotate_conf}` generated", "debug") - - def __run_nginx_log_rotation(self): - if self.config["logrotate"]["active"]: - self.__log("Nginx log rotation started", "info") - self.__generate_logrotate_conf_file() - self.__log( - f"Using config file `{self.__nginx_logrotate_conf}`", - "debug", - ) - if self.verbose: - self.__execute_cmd(self.__logrotate_cmd_debug) - self.__execute_cmd(self.__logrotate_cmd) - def __run_configlog_rotation(self): try: configlog_days_delta = self.config["config"]["config_rotation_in_db"] diff --git a/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 b/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 index d7938a717c..4bea701268 100644 --- 
a/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 b/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 index 3c6855a4d9..9000362318 100644 --- a/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/component.json.j2 b/python/cm/tests/files/response_templates/action_configs/component.json.j2 index 461125a533..476d2651d7 100644 --- a/python/cm/tests/files/response_templates/action_configs/component.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/component.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 b/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 index 749689d59c..c150ad94ca 100644 --- a/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/host.json.j2 
b/python/cm/tests/files/response_templates/action_configs/host.json.j2 index 9bdf7a53e6..930c0ef2f8 100644 --- a/python/cm/tests/files/response_templates/action_configs/host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/host.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 b/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 index e704499f0a..b38bc6b9f8 100644 --- a/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_cluster.json.j2 b/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_cluster.json.j2 index 3e3fb4abb7..ec303cf63a 100644 --- a/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_cluster.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_cluster.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_service.json.j2 b/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_service.json.j2 index 7fc0ce65b4..f37247f499 100644 --- a/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_service.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/job_bundle_relative_service.json.j2 @@ -16,7 +16,6 @@ "client_id": null, 
"secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/job_proto_relative_cluster.json.j2 b/python/cm/tests/files/response_templates/action_configs/job_proto_relative_cluster.json.j2 index bba54575a9..b7e2ed344e 100644 --- a/python/cm/tests/files/response_templates/action_configs/job_proto_relative_cluster.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/job_proto_relative_cluster.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/job_proto_relative_service.json.j2 b/python/cm/tests/files/response_templates/action_configs/job_proto_relative_service.json.j2 index 14d97603d0..3e7e3bfe0e 100644 --- a/python/cm/tests/files/response_templates/action_configs/job_proto_relative_service.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/job_proto_relative_service.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/service.json.j2 b/python/cm/tests/files/response_templates/action_configs/service.json.j2 index 44c2590dc7..be5e97abcd 100644 --- a/python/cm/tests/files/response_templates/action_configs/service.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/service.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 b/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 index f3f6d05aaf..5bba2eb8c2 
100644 --- a/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_cluster.json.j2 b/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_cluster.json.j2 index 8b5f1a723a..2140a3f385 100644 --- a/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_cluster.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_cluster.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_service.json.j2 b/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_service.json.j2 index 8e16774ca2..b0ff8f9acc 100644 --- a/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_service.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/task_mixed_bundle_relative_service.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_cluster.json.j2 b/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_cluster.json.j2 index 37e780d607..177b12e7d6 100644 --- a/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_cluster.json.j2 +++ 
b/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_cluster.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, diff --git a/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_service.json.j2 b/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_service.json.j2 index 037e501463..b5c2f419da 100644 --- a/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_service.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/task_mixed_proto_relative_service.json.j2 @@ -16,7 +16,6 @@ "client_id": null, "secret": null }, - "logrotate": null, "audit_data_retention": { "log_rotation_on_fs": 365, "log_rotation_in_db": 365, From 19997a7c77b0d316f6e13e06f1d576d01ef15ff0 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 27 Aug 2024 08:59:05 +0000 Subject: [PATCH 46/98] ADCM-5898 Rework logging settings --- Dockerfile | 3 +- go/adcm/status/log.go | 5 + os/etc/adcm/adcm.cfg | 3 +- os/etc/adcm/log.cfg | 131 ++++++++++++++++++ os/etc/nginx/http.d/proxy.inc | 16 ++- os/etc/nginx/nginx.conf | 115 +++++++++++++++ {conf => os/etc}/nginx/uwsgi_params | 0 os/etc/sv/nginx/run | 9 +- python/adcm/settings.py | 102 +------------- python/adcm/setup/__init__.py | 11 ++ .../{custom_loggers.py => setup/logging.py} | 17 +++ 11 files changed, 298 insertions(+), 114 deletions(-) create mode 100644 os/etc/adcm/log.cfg create mode 100644 os/etc/nginx/nginx.conf rename {conf => os/etc}/nginx/uwsgi_params (100%) create mode 100644 python/adcm/setup/__init__.py rename python/adcm/{custom_loggers.py => setup/logging.py} (73%) diff --git a/Dockerfile b/Dockerfile index 1545b58466..3a6ede0ea2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,8 +25,7 @@ RUN apk update && \ sshpass && \ curl -sSL https://install.python-poetry.org | python - ENV 
PATH="/root/.local/bin:$PATH" -COPY pyproject.toml /adcm/ -COPY poetry.lock /adcm/ +COPY pyproject.toml poetry.lock /adcm/ RUN python -m venv /adcm/venv/2.9 && \ poetry config virtualenvs.create false && \ poetry -C /adcm install --no-root && \ diff --git a/go/adcm/status/log.go b/go/adcm/status/log.go index 0dfa3c2605..cc04c62e85 100644 --- a/go/adcm/status/log.go +++ b/go/adcm/status/log.go @@ -179,6 +179,11 @@ func (w *fileWriter) ReopenLogFile() { func GetLogLevel() string { const defaultLogLevel = "ERROR" + priorityLogLevel, ok := os.LookupEnv("STATUS_LOG_LEVEL") + if ok { + return priorityLogLevel + } + logLevel, ok := os.LookupEnv("LOG_LEVEL") if !ok { return defaultLogLevel diff --git a/os/etc/adcm/adcm.cfg b/os/etc/adcm/adcm.cfg index 6247a5a77c..641ae486a1 100644 --- a/os/etc/adcm/adcm.cfg +++ b/os/etc/adcm/adcm.cfg @@ -9,6 +9,7 @@ socket=/run/adcm.sock chmod-socket=777 max-requests=5000 maxsize=2000000 -logfile=/adcm/data/log/wsgi.log log-4xx = true log-5xx = true +disable-logging = true +logger = file:/adcm/data/log/wsgi.log diff --git a/os/etc/adcm/log.cfg b/os/etc/adcm/log.cfg new file mode 100644 index 0000000000..23afa6127c --- /dev/null +++ b/os/etc/adcm/log.cfg @@ -0,0 +1,131 @@ +# Logging settings +[loggers] +keys=root, adcm, audit, django, background_tasks, task_runner_err, django_auth_ldap + +[handlers] +keys=adcm_file, adcm_debug_file, background_task_file, ldap_file, audit_file, stream_stderr, stream_stdout, task_runner_err_file + +[formatters] +keys=adcm + +[filters] +keys=requireDebugFalse + +[handler_stream_stdout] +class=logging.StreamHandler +formatter=adcm +kwargs={"stream": sys.stdout} + +[handler_stream_stderr] +class=logging.StreamHandler +formatter=adcm +kwargs={"stream": sys.stdout} + +[handler_adcm_file] +filters=requireDebugFalse +formatter=adcm +class=adcm.setup.logging.LockingTimedRotatingFileHandler +kwargs={ + "filename": "%(LOG_DIR)s/adcm.log", + "when": "midnight", + "backupCount": 5, + } + +[handler_audit_file] 
+formatter=adcm +class=adcm.setup.logging.LockingTimedRotatingFileHandler +kwargs={ + "filename": "%(LOG_DIR)s/audit.log", + "when": "midnight", + "backupCount": 5, + } + + +[handler_adcm_debug_file] +filters=requireDebugFalse +formatter=adcm +class=adcm.setup.logging.LockingTimedRotatingFileHandler +kwargs={ + "filename": "%(LOG_DIR)s/adcm_debug.log", + "when": "midnight", + "backupCount": 5, + } + +[handler_background_task_file] +formatter=adcm +class=adcm.setup.logging.LockingTimedRotatingFileHandler +kwargs={ + "filename": "%(LOG_DIR)s/cron_task.log", + "when": "midnight", + "backupCount": 5, + } + +[handler_task_runner_err_file] +filters=requireDebugFalse +formatter=adcm +class=adcm.setup.logging.LockingTimedRotatingFileHandler +kwargs={ + "filename": "%(LOG_DIR)s/task_runner.err", + "when": "midnight", + "backupCount": 5, + } + +[handler_ldap_file] +class=adcm.setup.logging.LockingTimedRotatingFileHandler +formatter=adcm +kwargs={ + "filename": "%(LOG_DIR)s/ldap.log", + "when": "m", + "backupCount": 5, + } + +[formatter_adcm] +format={asctime} {levelname} {module} {message} +style={ + +[filter_requireDebugFalse] +class=django.utils.log.RequireDebugFalse + +[logger_root] +level=NOTSET +handlers= + +[logger_adcm] +handlers=adcm_file +level=%(LOG_LEVEL)s +propagate=1 +qualname=cm + +[logger_stream_std] +handlers=stream_stdout,stream_stderr +level=%(LOG_LEVEL)s +qualname=cm + +[logger_audit] +handlers=audit_file +level=INFO +propagate=1 +qualname=audit + +[logger_django] +handlers=adcm_debug_file +level=%(LOG_LEVEL)s +propagate=1 +qualname=django + +[logger_background_tasks] +handlers=background_task_file +level=%(LOG_LEVEL)s +propagate=1 +qualname=django.core.management + +[logger_task_runner_err] +handlers=task_runner_err_file +level=%(LOG_LEVEL)s +propagate=1 +qualname=task_runner + +[logger_django_auth_ldap] +qualname=django_auth_ldap +handlers=ldap_file +level=%(LOG_LEVEL)s diff --git a/os/etc/nginx/http.d/proxy.inc b/os/etc/nginx/http.d/proxy.inc index 
8c751e3728..249af20ee4 100644 --- a/os/etc/nginx/http.d/proxy.inc +++ b/os/etc/nginx/http.d/proxy.inc @@ -3,8 +3,12 @@ root /adcm/wwwroot; client_max_body_size 100M; - client_body_temp_path /adcm/data/tmp/nginx_client_temp 1 2; + client_body_temp_path /tmp/nginx_client_temp 1 2; client_body_timeout 300s; + proxy_temp_path /tmp/nginx_proxy_temp; + fastcgi_temp_path /tmp/nginx_fastcgi_temp; + uwsgi_temp_path /tmp/nginx_uwsgi_temp; + scgi_temp_path /tmp/nginx_scgi_temp; location / { if ($arg_nocache) { @@ -17,7 +21,7 @@ uwsgi_pass django; uwsgi_read_timeout 300s; uwsgi_send_timeout 300s; - include /adcm/conf/nginx/uwsgi_params; + include /etc/nginx/uwsgi_params; proxy_set_header Host $http_host; } @@ -25,18 +29,18 @@ uwsgi_pass django; uwsgi_read_timeout 300s; uwsgi_send_timeout 300s; - include /adcm/conf/nginx/uwsgi_params; + include /etc/nginx/uwsgi_params; proxy_set_header Host $http_host; } location /cm { uwsgi_pass django; - include /adcm/conf/nginx/uwsgi_params; + include /etc/nginx/uwsgi_params; } location /social { uwsgi_pass django; - include /adcm/conf/nginx/uwsgi_params; + include /etc/nginx/uwsgi_params; } location /ws { @@ -50,7 +54,7 @@ location /status/ { # That takes so many lines in access log for statuses, # so it is better to disable access for that url - access_log /var/log/nginx/access.log combined if=$abnormal; + access_log /adcm/data/log/nginx/access.log combined if=$abnormal; proxy_pass http://127.0.0.1:8020/; proxy_set_header Host $http_host; } diff --git a/os/etc/nginx/nginx.conf b/os/etc/nginx/nginx.conf new file mode 100644 index 0000000000..03b5ee5ae9 --- /dev/null +++ b/os/etc/nginx/nginx.conf @@ -0,0 +1,115 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# /etc/nginx/nginx.conf + +pid /tmp/nginx.pid; + +# Set number of worker processes automatically based on number of CPU cores. +worker_processes auto; + +# Enables the use of JIT for regular expressions to speed-up their processing. +pcre_jit on; + +# Configures default error logger. +error_log /adcm/data/log/nginx/error.log warn; + +# Includes files with directives to load dynamic modules. +include /etc/nginx/modules/*.conf; + +# Include files with config snippets into the root context. +include /etc/nginx/conf.d/*.conf; + +events { + # The maximum number of simultaneous connections that can be opened by + # a worker process. + worker_connections 1024; +} + +http { + # Includes mapping of file name extensions to MIME types of responses + # and defines the default type. + include /etc/nginx/mime.types; + default_type application/octet-stream; + + # Name servers used to resolve names of upstream servers into addresses. + # It's also needed when using tcpsocket and udpsocket in Lua modules. + #resolver 1.1.1.1 1.0.0.1 2606:4700:4700::1111 2606:4700:4700::1001; + + # Don't tell nginx version to the clients. Default is 'on'. + server_tokens off; + # Specifies the maximum accepted body size of a client request, as + # indicated by the request header Content-Length. If the stated content + # length is greater than this size, then the client receives the HTTP + # error code 413. Set to 0 to disable. Default is '1m'. 
+ client_max_body_size 1m; + + # Sendfile copies data between one FD and other from within the kernel, + # which is more efficient than read() + write(). Default is off. + sendfile on; + + # Causes nginx to attempt to send its HTTP response head in one packet, + # instead of using partial frames. Default is 'off'. + tcp_nopush on; + + # Enables the specified protocols. Default is TLSv1 TLSv1.1 TLSv1.2. + # TIP: If you're not obligated to support ancient clients, remove TLSv1.1. + ssl_protocols TLSv1.1 TLSv1.2 TLSv1.3; + + # Path of the file with Diffie-Hellman parameters for EDH ciphers. + # TIP: Generate with: `openssl dhparam -out /etc/ssl/nginx/dh2048.pem 2048` + #ssl_dhparam /etc/ssl/nginx/dh2048.pem; + + # Specifies that our cipher suits should be preferred over client ciphers. + # Default is 'off'. + ssl_prefer_server_ciphers on; + + # Enables a shared SSL cache with size that can hold around 8000 sessions. + # Default is 'none'. + ssl_session_cache shared:SSL:2m; + + # Specifies a time during which a client may reuse the session parameters. + # Default is '5m'. + ssl_session_timeout 1h; + + # Disable TLS session tickets (they are insecure). Default is 'on'. + ssl_session_tickets off; + + # Path of the file with Diffie-Hellman parameters for EDH ciphers. + # TIP: Generate with: `openssl dhparam -out /etc/ssl/nginx/dh2048.pem 2048` + #ssl_dhparam /etc/ssl/nginx/dh2048.pem; + + # Enable gzipping of responses. + #gzip on; + + # Set the Vary HTTP header as defined in the RFC 2616. Default is 'off'. + gzip_vary on; + + # Helper variable for proxying websockets. + map $http_upgrade $connection_upgrade { + default upgrade; + '' close; + } + + + # Specifies the main log format. + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + # Sets the path, format, and configuration for a buffered log write. 
+ access_log /adcm/data/log/nginx/access.log main; + + + # Includes virtual hosts configs. + include /etc/nginx/http.d/*.conf; +} diff --git a/conf/nginx/uwsgi_params b/os/etc/nginx/uwsgi_params similarity index 100% rename from conf/nginx/uwsgi_params rename to os/etc/nginx/uwsgi_params diff --git a/os/etc/sv/nginx/run b/os/etc/sv/nginx/run index 136c63518f..e1b61c2b93 100755 --- a/os/etc/sv/nginx/run +++ b/os/etc/sv/nginx/run @@ -21,10 +21,9 @@ if [ -z "$MIGRATION_MODE" ] || [ "$MIGRATION_MODE" -ne 1 ]; then exec 1>"${adcmlog}/nginx/service_nginx.out" exec 2>"${adcmlog}/nginx/service_nginx.err" - mkdir -p /run/nginx - - # Cleanup tmp dir during restart of nginx. That dir is for - # uploading bundles cache. - rm -rf /adcm/data/tmp/nginx_client_temp || true + # Cleanup tmp dir during restart of nginx. + # That dir is for uploading bundles cache. + # This is for old existing installations + rm -rf ${adcmtmp}/nginx_client_temp || true exec /usr/sbin/nginx -c /etc/nginx/nginx.conf -g "daemon off;" fi diff --git a/python/adcm/settings.py b/python/adcm/settings.py index 39fe17cf71..04957439bc 100644 --- a/python/adcm/settings.py +++ b/python/adcm/settings.py @@ -224,106 +224,8 @@ def get_db_options() -> dict: LOG_LEVEL = os.getenv("LOG_LEVEL", logging.getLevelName(logging.ERROR)) -if not DEBUG: - LOGGING = { - "version": 1, - "disable_existing_loggers": False, - "filters": { - "require_debug_false": { - "()": "django.utils.log.RequireDebugFalse", - }, - }, - "formatters": { - "adcm": { - "format": "{asctime} {levelname} {module} {message}", - "style": "{", - }, - "ldap": { - "format": "{levelname} {module}: {message}", - "style": "{", - }, - }, - "handlers": { - "adcm_file": { - "filters": ["require_debug_false"], - "formatter": "adcm", - "class": "logging.FileHandler", - "filename": LOG_FILE, - }, - "adcm_debug_file": { - "filters": ["require_debug_false"], - "formatter": "adcm", - "class": "logging.FileHandler", - "filename": LOG_DIR / "adcm_debug.log", - }, - 
"task_runner_err_file": { - "filters": ["require_debug_false"], - "formatter": "adcm", - "class": "logging.FileHandler", - "filename": LOG_DIR / "task_runner.err", - }, - "background_task_file_handler": { - "formatter": "adcm", - "class": "logging.handlers.TimedRotatingFileHandler", - "filename": LOG_DIR / "cron_task.log", - "when": "midnight", - "backupCount": 10, - }, - "audit_file_handler": { - "class": "adcm.custom_loggers.LockingTimedRotatingFileHandler", - "filename": LOG_DIR / "audit.log", - "when": "midnight", - "backupCount": 10, - }, - "stream_stdout_handler": { - "class": "logging.StreamHandler", - "formatter": "adcm", - "stream": "ext://sys.stdout", - }, - "stream_stderr_handler": { - "class": "logging.StreamHandler", - "formatter": "adcm", - "stream": "ext://sys.stderr", - }, - "ldap_file_handler": { - "class": "logging.FileHandler", - "formatter": "adcm", - "filename": LOG_DIR / "ldap.log", - }, - }, - "loggers": { - "adcm": { - "handlers": ["adcm_file"], - "level": LOG_LEVEL, - "propagate": True, - }, - "django": { - "handlers": ["adcm_debug_file"], - "level": LOG_LEVEL, - "propagate": True, - }, - "background_tasks": { - "handlers": ["background_task_file_handler"], - "level": LOG_LEVEL, - "propagate": True, - }, - "audit": { - "handlers": ["audit_file_handler"], - "level": "INFO", - "propagate": True, - }, - "task_runner_err": { - "handlers": ["task_runner_err_file"], - "level": LOG_LEVEL, - "propagate": True, - }, - "stream_std": { - "handlers": ["stream_stdout_handler", "stream_stderr_handler"], - "level": LOG_LEVEL, - }, - "django_auth_ldap": {"handlers": ["ldap_file_handler"], "level": LOG_LEVEL, "propagate": True}, - }, - } +LOGGING_CONFIG = "adcm.setup.logging.configure_logging_from_file" +LOGGING = Path("/etc/adcm/log.cfg") DEFAULT_AUTO_FIELD = "django.db.models.AutoField" diff --git a/python/adcm/setup/__init__.py b/python/adcm/setup/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ 
b/python/adcm/setup/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/adcm/custom_loggers.py b/python/adcm/setup/logging.py similarity index 73% rename from python/adcm/custom_loggers.py rename to python/adcm/setup/logging.py index 1f41b3516a..9f62da70b9 100644 --- a/python/adcm/custom_loggers.py +++ b/python/adcm/setup/logging.py @@ -14,6 +14,23 @@ from pathlib import Path from tempfile import gettempdir import fcntl +import logging + + +def configure_logging_from_file(filepath: Path | str): + if not Path(filepath).is_file(): + print(f"File with loggers configuration is missing at {filepath}") + return + + from django.conf import settings + + logging.config.fileConfig( + filepath, + defaults={ + "LOG_DIR": settings.LOG_DIR, + "LOG_LEVEL": settings.LOG_LEVEL, + }, + ) class LockingTimedRotatingFileHandler(TimedRotatingFileHandler): From a9cb26de8e3c0cd30c64167920904642cddad3dc Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Tue, 27 Aug 2024 12:29:09 +0500 Subject: [PATCH 47/98] ADCM-5898 Rework logging in status server --- go/adcm/runstatus.go | 16 ++- go/adcm/status/api.go | 38 +++--- go/adcm/status/auth.go | 2 +- go/adcm/status/errors.go | 4 +- go/adcm/status/handlers.go | 25 ++-- go/adcm/status/init.go | 8 +- go/adcm/status/log.go | 218 +++++++++++++++------------------- go/adcm/status/service_map.go | 6 +- go/adcm/status/status.go | 2 +- go/adcm/status/storage.go | 6 +- go/adcm/status/ws.go | 26 ++-- 11 files changed, 
166 insertions(+), 185 deletions(-) diff --git a/go/adcm/runstatus.go b/go/adcm/runstatus.go index 1ca05fe63b..aa51888fbd 100644 --- a/go/adcm/runstatus.go +++ b/go/adcm/runstatus.go @@ -30,5 +30,19 @@ func main() { os.Exit(0) } - status.Start(status.ReadSecret(fileAuthKey), *logFile, status.GetLogLevel()) + status.Start(status.ReadSecret(fileAuthKey), *logFile, GetLogLevel()) +} + +func GetLogLevel() string { + priorityLogLevel, ok := os.LookupEnv("STATUS_LOG_LEVEL") + if ok { + return priorityLogLevel + } + + logLevel, ok := os.LookupEnv("LOG_LEVEL") + if !ok { + return status.DefaultLogLevel + } + + return logLevel } diff --git a/go/adcm/status/api.go b/go/adcm/status/api.go index 9034b06c7e..d2c086e044 100644 --- a/go/adcm/status/api.go +++ b/go/adcm/status/api.go @@ -54,31 +54,30 @@ func (api *AdcmApi) getToken() (string, bool) { "password": {api.Secrets.ADCMUser.Password}, }) if err != nil { - logg.E.l("getToken: http error: ", err) + logg.E.Printf("getToken: http error: %v", err) return "", false } defer resp.Body.Close() if resp.StatusCode != 200 { - logg.E.f("getToken: http status: %s", resp.Status) + logg.E.Printf("getToken: http status: %s", resp.Status) body, err := ioutil.ReadAll(io.LimitReader(resp.Body, MaxPostSize)) if err == nil { - logg.E.f("getToken: POST body: '%s'", body) + logg.E.Printf("getToken: POST body: %q", body) } return "", false } body, err := ioutil.ReadAll(io.LimitReader(resp.Body, MaxPostSize)) if err != nil { - logg.E.l("getToken: body read error: ", err) + logg.E.Printf("getToken: body read error: %v", err) return "", false } - //logg.D.f("getToken body: %s", body) var v struct{ Token string } if err := json.Unmarshal(body, &v); err != nil { - logg.E.l("getToken: json decode error: ", err) + logg.E.Printf("getToken: json decode error: %v", err) return "", false } - logg.D.l("getToken: token: ", v.Token) + api.token = v.Token return v.Token, true } @@ -87,18 +86,17 @@ func (api *AdcmApi) checkAuth(token string) bool { client := 
api.getClient() req, _ := http.NewRequest("GET", api.Url+"/rbac/me/", nil) req.Header.Add("Authorization", "Token "+token) - //logg.D.f("checkAuth: client %+v, request %+v", client, req) resp, err := client.Do(req) if err != nil { - logg.E.f("checkAuth: http error: %v", err) + logg.E.Printf("checkAuth: http error: %v", err) return false } defer resp.Body.Close() if resp.StatusCode != 200 { - logg.W.f("check ADCM token %s fail: %v", token, resp.Status) + logg.W.Printf("check ADCM token %s fail: %v", token, resp.Status) return false } - logg.D.l("checkAuth: check ADCM token ok") + logg.D.Println("checkAuth: check ADCM token ok") return true } @@ -106,18 +104,18 @@ func (api *AdcmApi) checkSessionAuth(sessionId string) bool { client := api.getClient() req, _ := http.NewRequest("GET", api.Url+"/stack/", nil) req.AddCookie(&http.Cookie{Name: "sessionid", Value: sessionId}) - //logg.D.f("checkSessionAuth: client %+v, request %+v", client, req) + //logg.D.Printf(checkSessionAuth: client %+v, request %+v", client, req) resp, err := client.Do(req) if err != nil { - logg.E.f("checkSessionAuth: http error: %v", err) + logg.E.Printf("checkSessionAuth: http error: %v", err) return false } defer resp.Body.Close() if resp.StatusCode != 200 { - logg.W.f("check ADCM sessionId %s fail: %v", sessionId, resp.Status) + logg.W.Printf("check ADCM sessionId %s fail: %v", sessionId, resp.Status) return false } - logg.D.l("checkSessionAuth: check ADCM sessionId ok") + logg.D.Println("checkSessionAuth: check ADCM sessionId ok") return true } @@ -131,24 +129,24 @@ func (api *AdcmApi) loadServiceMap() bool { req.Header.Add("Authorization", "Token "+token) resp, err := client.Do(req) if err != nil { - logg.E.l("loadServiceMap: http error: ", err) + logg.E.Println("loadServiceMap: http error: ", err) return false } defer resp.Body.Close() if resp.StatusCode != 200 { - logg.E.f("loadServiceMap: http status: %s", resp.Status) + logg.E.Printf("loadServiceMap: http status: %s", resp.Status) body, 
err := ioutil.ReadAll(io.LimitReader(resp.Body, MaxPostSize)) if err == nil { - logg.E.f("loadServiceMap: POST body: '%s'", body) + logg.E.Printf("loadServiceMap: POST body: '%s'", body) } return false } _, err = ioutil.ReadAll(io.LimitReader(resp.Body, MaxPostSize)) if err != nil { - logg.E.l("loadServiceMap: body read error: ", err) + logg.E.Println("loadServiceMap: body read error: ", err) return false } - logg.D.f("loadServiceMap: call /stack/load/servicemap/ got %s response", resp.Status) + logg.D.Printf("loadServiceMap: call /stack/load/servicemap/ got %s response", resp.Status) return true } diff --git a/go/adcm/status/auth.go b/go/adcm/status/auth.go index 2f841aeca9..fbcd023803 100644 --- a/go/adcm/status/auth.go +++ b/go/adcm/status/auth.go @@ -43,7 +43,7 @@ func checkADCMUserToken(hub Hub, token string) bool { func djangoAuth(r *http.Request, hub Hub) bool { sessionId, err := r.Cookie("sessionid") if err != nil { - logg.D.f("no sessionid cookie") + logg.D.Println("No sessionid cookie") return false } return hub.AdcmApi.checkSessionAuth(sessionId.Value) diff --git a/go/adcm/status/errors.go b/go/adcm/status/errors.go index c2af9fb73a..77a5cad759 100644 --- a/go/adcm/status/errors.go +++ b/go/adcm/status/errors.go @@ -80,6 +80,6 @@ func errOut(w http.ResponseWriter, r *http.Request, apiErr ApiErr) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(apiErr.httpCode) json.NewEncoder(w).Encode(apiErr) //nolint: errcheck - logg.W.f("%s %s", apiErr.Code, apiErr.Msg) - logg.I.f("%s %s %d", r.Method, r.URL.Path, apiErr.httpCode) + logg.W.Printf("%s %s", apiErr.Code, apiErr.Msg) + logg.I.Printf("%s %s %d", r.Method, r.URL.Path, apiErr.httpCode) } diff --git a/go/adcm/status/handlers.go b/go/adcm/status/handlers.go index f86fa49c36..d050319aaa 100644 --- a/go/adcm/status/handlers.go +++ b/go/adcm/status/handlers.go @@ -20,6 +20,7 @@ import ( "io/ioutil" "net/http" "strconv" + "strings" "github.com/bouk/httprouter" ) @@ -37,7 +38,7 @@ type 
clusterDetails struct { func index(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/html") fmt.Fprint(w, "Status Server API\n") - logg.I.f("%s %s %d\n", r.Method, r.URL.Path, 200) + logg.I.Printf("%s %s %d", r.Method, r.URL.Path, 200) } func apiRoot(w http.ResponseWriter, r *http.Request) { @@ -125,7 +126,7 @@ func readConfig(h Hub, w http.ResponseWriter, r *http.Request) { func showLogLevel(h Hub, w http.ResponseWriter, r *http.Request) { jsonOut(w, r, struct { Level string `json:"level"` - }{Level: logg.getLogLevel()}) + }{Level: logg.level}) } func postLogLevel(h Hub, w http.ResponseWriter, r *http.Request) { @@ -135,12 +136,12 @@ func postLogLevel(h Hub, w http.ResponseWriter, r *http.Request) { if _, err := decodeBody(w, r, &level); err != nil { return } - intLevel, err := logg.decodeLogLevel(level.Level) + + err := logg.SetLogLevel(strings.ToUpper(level.Level)) if err != nil { ErrOut4(w, r, "LOG_ERROR", err.Error()) return } - logg.set(intLevel) } func showHostComp(h Hub, w http.ResponseWriter, r *http.Request) { @@ -242,9 +243,9 @@ func postEvent(h Hub, w http.ResponseWriter, r *http.Request) { if err != nil { return } - logg.D.f("postEvent - %+v", event) + logg.D.Printf("postEvent - %+v", event) if !checkEvent(event, w, r) { - logg.W.f("POST body: '%s'", body) + logg.W.Printf("POST body: '%s'", body) return } h.EventWS.send2ws(event) @@ -336,12 +337,12 @@ func postServiceMap(h Hub, w http.ResponseWriter, r *http.Request) { ErrOut4(w, r, "JSON_ERROR", err.Error()) return } - logg.D.f("postServiceMap: %+v", m) + logg.D.Printf("postServiceMap: %+v", m) if len(m.HostService) < 1 { - logg.W.f("%s %s", "INPUT_WARNING", "no HostService in servicemap post") + logg.W.Printf("%s %s", "INPUT_WARNING", "no HostService in servicemap post") } if len(m.Component) < 1 { - logg.W.f("%s %s", "INPUT_WARNING", "no Component in servicemap post") + logg.W.Printf("%s %s", "INPUT_WARNING", "no Component in servicemap post") } h.ServiceMap.init(m) // 
h.ServiceStorage.pure() @@ -406,7 +407,7 @@ func decodeBody(w http.ResponseWriter, r *http.Request, v interface{}) ([]byte, err = decoder.Decode(v) if err != nil { ErrOut4(w, r, "JSON_ERROR", err.Error()) - logg.W.f("POST body: '%s'", body) + logg.W.Printf("POST body: '%s'", body) return body, err } return body, nil @@ -425,8 +426,8 @@ func jsonOut3(w http.ResponseWriter, r *http.Request, out interface{}, status_co w.WriteHeader(status_code) if out != "" { if err := json.NewEncoder(w).Encode(out); err != nil { - logg.E.f("JSON out error: %v, (%v)", err, out) + logg.E.Printf("JSON out error: %v, (%v)", err, out) } } - logg.I.f("%s %s %d", r.Method, r.URL.Path, status_code) + logg.I.Printf("%s %s %d", r.Method, r.URL.Path, status_code) } diff --git a/go/adcm/status/init.go b/go/adcm/status/init.go index 9bfe178dca..d7b1dc5b4e 100644 --- a/go/adcm/status/init.go +++ b/go/adcm/status/init.go @@ -39,7 +39,7 @@ type Hub struct { func Start(secrets *SecretConfig, logFile string, logLevel string) { hub := Hub{Secrets: secrets} - initLog(logFile, logLevel) + InitLog(logFile, logLevel) initSignal() hub.MMObjects = newMMObjects() @@ -70,7 +70,7 @@ func Start(secrets *SecretConfig, logFile string, logLevel string) { } func startHTTP(httpPort string, hub Hub) { - logg.I.f("start http server on %s", httpPort) + logg.I.Printf("start http server on %s", httpPort) router := httprouter.New() router.RedirectTrailingSlash = false @@ -149,8 +149,8 @@ func initSignal() { go func() { for { sig := <-c - logg.D.f("recive signal %v", sig) - logg.rotate() + logg.D.Printf("Signal received %v", sig) + logg.ReopenLogFile() } }() } diff --git a/go/adcm/status/log.go b/go/adcm/status/log.go index cc04c62e85..66ae771c7d 100644 --- a/go/adcm/status/log.go +++ b/go/adcm/status/log.go @@ -13,181 +13,149 @@ package status import ( - "errors" "fmt" "log" "os" + "strings" "sync" ) -type logger struct { - D logWrapper - I logWrapper - W logWrapper - E logWrapper - C logWrapper - level *int - levelMap 
map[string]int -} +const DefaultLogLevel = "ERROR" -type logWrapper struct { - out logWriter - log *log.Logger - level int - current *int +type simpleLogger interface { + Println(v ...interface{}) + Printf(format string, v ...interface{}) } -const ( - DEBUG = 1 - INFO = 2 - WARN = 3 - ERR = 4 - CRIT = 5 -) - -var logg logger - -func (log *logger) decodeLogLevel(level string) (int, error) { - intLevel, ok := log.levelMap[level] - if !ok { - return 0, errors.New("Unknown log level: " + level) - } - return intLevel, nil +type logHandler interface { + Write(output []byte) (int, error) + ReopenLogFile() } -func (log *logger) getLogLevel() string { - for strLevel, intLevel := range log.levelMap { - if intLevel == *log.level { - return strLevel - } +// Logger +type logger struct { + D simpleLogger + I simpleLogger + W simpleLogger + E simpleLogger + C simpleLogger + handler logHandler + level string +} + +func (l *logger) ReopenLogFile() { + l.handler.ReopenLogFile() +} + +func (l *logger) SetLogLevel(level string) error { + createRealLogger := func(level string) *log.Logger { + return log.New( + l.handler, + "["+strings.ToUpper(level)+"] ", + log.Ldate|log.Lmicroseconds|log.Lshortfile, + ) } - return "none" -} - -func (log *logger) rotate() { - log.E.out.ReopenLogFile() -} -func (log *logger) set(level int) { - *log.level = level - log.W.l("set log level to \"" + log.getLogLevel() + "\"") -} - -func (log *logWrapper) l(v ...interface{}) { - if log.level < *log.current { - return + dummy := &dummyLogger{} + + switch level { + case "DEBUG": + l.D = createRealLogger("DEBUG") + l.I = createRealLogger("INFO") + l.W = createRealLogger("WARNING") + l.E = createRealLogger("ERROR") + l.C = createRealLogger("CRITICAL") + case "INFO": + l.D = dummy + l.I = createRealLogger("INFO") + l.W = createRealLogger("WARNING") + l.E = createRealLogger("ERROR") + l.C = createRealLogger("CRITICAL") + case "WARNING": + l.D = dummy + l.I = dummy + l.W = createRealLogger("WARNING") + l.E = 
createRealLogger("ERROR") + l.C = createRealLogger("CRITICAL") + case "ERROR": + l.D = dummy + l.I = dummy + l.W = dummy + l.E = createRealLogger("ERROR") + l.C = createRealLogger("CRITICAL") + case "CRITICAL": + l.D = dummy + l.I = dummy + l.W = dummy + l.E = dummy + l.C = createRealLogger("CRITICAL") + default: + return fmt.Errorf("unknown log level: %s", level) } - log.log.Println(v...) + l.level = level + return nil } -func (log *logWrapper) f(format string, v ...interface{}) { - if log.level < *log.current { - return - } - log.log.Printf(format, v...) -} +var logg logger -func initLog(logFile string, level string) { +func InitLog(logFile string, level string) { logg = logger{} - var out logWriter - logg.levelMap = map[string]int{ - "DEBUG": DEBUG, - "INFO": INFO, - "WARNING": WARN, - "ERROR": ERR, - "CRITICAL": CRIT, - } - logLevel, err := logg.decodeLogLevel(level) - if err != nil { - fmt.Println(err.Error()) - os.Exit(1) - } + if logFile == "" { - out = newStdoutWriter() + logg.handler = &stdOutHandler{fp: os.Stdout} } else { - out = newFileWriter(logFile) + logg.handler = &fileHandler{filename: logFile} + logg.handler.ReopenLogFile() } - logg.level = &logLevel - logg.D = newLog(out, &logLevel, DEBUG, "[DEBUG] ") - logg.I = newLog(out, &logLevel, INFO, "[INFO] ") - logg.W = newLog(out, &logLevel, WARN, "[WARN] ") - logg.E = newLog(out, &logLevel, ERR, "[ERROR] ") - logg.C = newLog(out, &logLevel, CRIT, "[CRITICAL] ") -} -func newLog(out logWriter, current *int, level int, tag string) logWrapper { - return logWrapper{ - out: out, - level: level, - current: current, - log: log.New(out, tag, log.Ldate|log.Lmicroseconds|log.Lshortfile), + err := logg.SetLogLevel(level) + if err != nil { + if retryErr := logg.SetLogLevel(DefaultLogLevel); retryErr != nil { + log.Fatalf("Failed to set level %q and fallback to default %q", level, DefaultLogLevel) + } } } -type logWriter interface { - Write(output []byte) (int, error) - ReopenLogFile() -} +// Dummy Logger -type 
stdoutWriter struct { - fp *os.File -} +type dummyLogger struct{} + +func (dl *dummyLogger) Println(v ...interface{}) {} +func (dll *dummyLogger) Printf(format string, v ...interface{}) {} + +// Handlers -func newStdoutWriter() *stdoutWriter { - return &stdoutWriter{fp: os.Stdout} +type stdOutHandler struct { + fp *os.File } -func (w *stdoutWriter) Write(output []byte) (int, error) { +func (w *stdOutHandler) Write(output []byte) (int, error) { return w.fp.Write(output) } -func (w *stdoutWriter) ReopenLogFile() { +func (w *stdOutHandler) ReopenLogFile() { } -// File Writer - -type fileWriter struct { +type fileHandler struct { lock sync.Mutex filename string fp *os.File } -func newFileWriter(filename string) *fileWriter { - w := fileWriter{filename: filename} - w.ReopenLogFile() - return &w -} - -func (w *fileWriter) Write(output []byte) (int, error) { +func (w *fileHandler) Write(output []byte) (int, error) { w.lock.Lock() defer w.lock.Unlock() return w.fp.Write(output) } -func (w *fileWriter) ReopenLogFile() { +func (w *fileHandler) ReopenLogFile() { var err error w.lock.Lock() defer w.lock.Unlock() w.fp, err = os.OpenFile(w.filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) if err != nil { - log.Fatalf("error opening log file %s: %v", w.filename, err) + log.Fatalf("Error opening log file %s: %v", w.filename, err) } } - -func GetLogLevel() string { - const defaultLogLevel = "ERROR" - - priorityLogLevel, ok := os.LookupEnv("STATUS_LOG_LEVEL") - if ok { - return priorityLogLevel - } - - logLevel, ok := os.LookupEnv("LOG_LEVEL") - if !ok { - return defaultLogLevel - } - - return logLevel -} diff --git a/go/adcm/status/service_map.go b/go/adcm/status/service_map.go index ef243aed6f..4ab9c552db 100644 --- a/go/adcm/status/service_map.go +++ b/go/adcm/status/service_map.go @@ -89,10 +89,10 @@ func newServiceServer() *ServiceServer { } func (s *ServiceServer) run() { - logg.I.l("start service map server") + logg.I.Println("start service map server") for { c := <-s.in - 
logg.I.l("ServiceServer command: ", c) + logg.I.Println("ServiceServer command: ", c) switch c.command { case "init": s.smap = initServiceMap(c.smap) @@ -124,7 +124,7 @@ func (s *ServiceServer) run() { hosts := s.smap.getComponentHosts(c.cluster) s.out <- ssResp{rmap: hosts, ok: true} default: - logg.E.l("ServiceServer unknown ss command: ", c) + logg.E.Println("ServiceServer unknown ss command: ", c) } } } diff --git a/go/adcm/status/status.go b/go/adcm/status/status.go index 5561ae38ea..d7303604c1 100644 --- a/go/adcm/status/status.go +++ b/go/adcm/status/status.go @@ -180,7 +180,7 @@ func getClusterHostStatus(h Hub, clusterId int) (int, map[int]Status) { } status, ok := h.HostStatusStorage.get(ALL, hostId) if !ok { - logg.D.f("getClusterHostStatus: no status for host #%v ", hostId) + logg.D.Printf("getClusterHostStatus: no status for host #%v ", hostId) status = Status{Status: 16} } if status.Status != 0 { diff --git a/go/adcm/status/storage.go b/go/adcm/status/storage.go index b2dc13f085..d5cd59ab3f 100644 --- a/go/adcm/status/storage.go +++ b/go/adcm/status/storage.go @@ -124,10 +124,10 @@ func (s *Storage) setTimeOut(timeout int) { } func (s *Storage) run() { - logg.I.f("start storage %s server", s.label) + logg.I.Printf("start storage %s server", s.label) for { c := <-s.in - logg.I.f("Storage %s command: %+v", s.label, c) + logg.I.Printf("Storage %s command: %+v", s.label, c) switch c.command { case cmdSet: v := s.dbMap.set(c.key1, c.key2, c.val) @@ -148,7 +148,7 @@ func (s *Storage) run() { v, ok := s.dbMap.get1(c.key1) s.out <- storageResponse{map1: v, ok: ok} default: - logg.E.f("Storage %s unknown command: %+v", s.label, c) + logg.E.Printf("Storage %s unknown command: %+v", s.label, c) } } } diff --git a/go/adcm/status/ws.go b/go/adcm/status/ws.go index 124a1120c4..213b8d1b96 100644 --- a/go/adcm/status/ws.go +++ b/go/adcm/status/ws.go @@ -68,16 +68,16 @@ func (h *wsHub) run() { for { select { case ws := <-h.register: - logg.D.l("wsHub register: ", ws) + 
logg.D.Println("wsHub register: ", ws) h.clients[ws] = true case ws := <-h.unregister: - logg.D.l("wsHub unregister: ", ws) + logg.D.Println("wsHub unregister: ", ws) if _, ok := h.clients[ws]; ok { delete(h.clients, ws) close(ws.send) } case msg := <-h.broadcast: - logg.D.f("wsHub broadcast: %v", msg) + logg.D.Printf("wsHub broadcast: %v", msg) for ws := range h.clients { ws.send <- msg } @@ -86,7 +86,7 @@ func (h *wsHub) run() { } func (h *wsHub) send2ws(s wsMsg) { - //logg.D.f("enter send2ws: %v", s) + //logg.D.Printf(enter send2ws: %v", s) h.broadcast <- s } @@ -97,18 +97,18 @@ func write2ws(c *wsClient) { select { case s, ok := <-c.send: if !ok { - logg.D.l("write2ws chanel closed") + logg.D.Println("write2ws chanel closed") return } - logg.D.l("write2ws recive: ", s) + logg.D.Println("write2ws recive: ", s) c.ws.SetWriteDeadline(time.Now().Add(writeWait)) //nolint: errcheck js, err := s.encode() if err != nil { - logg.E.l("write2ws incorrect json: ", s) + logg.E.Println("write2ws incorrect json: ", s) continue } if err := c.ws.WriteMessage(websocket.TextMessage, js); err != nil { - logg.W.l("write2ws write: ", err) + logg.W.Println("write2ws write: ", err) c.ws.Close() return } @@ -131,7 +131,7 @@ func read4ws(h *wsHub, c *wsClient) { for { _, _, err := c.ws.ReadMessage() if err != nil { - logg.I.f("read2ws client %v close ws: %v", c, err) + logg.I.Printf("read2ws client %v close ws: %v", c, err) h.unregister <- c c.ws.Close() return @@ -141,14 +141,14 @@ func read4ws(h *wsHub, c *wsClient) { func initWS(h *wsHub, w http.ResponseWriter, r *http.Request) { ws, err := upgrader.Upgrade(w, r, nil) - logg.D.l("initWs open ws") + logg.D.Println("initWs open ws") if err != nil { - logg.E.l("initWs upgrade: ", err) + logg.E.Println("initWs upgrade: ", err) return } defer func() { - logg.D.l("initWs close ws") + logg.D.Println("initWs close ws") ws.Close() }() @@ -170,7 +170,7 @@ func checkOrigin(r *http.Request) bool { } s1 := strings.Split(u.Host, ":") s2 := 
strings.Split(r.Host, ":") - logg.D.f("checkOrigin origin host: %v, header host: %v", u.Host, r.Host) + logg.D.Printf("checkOrigin origin host: %v, header host: %v", u.Host, r.Host) if s1[0] == s2[0] { return true } else { From 75386328e547f1ed24143d1464124454e633dcaa Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Tue, 27 Aug 2024 16:17:47 +0500 Subject: [PATCH 48/98] ADCM-5060 Fix tests after merge --- python/api_v2/tests/test_concerns.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/python/api_v2/tests/test_concerns.py b/python/api_v2/tests/test_concerns.py index 01ab6b466f..471d458fe0 100644 --- a/python/api_v2/tests/test_concerns.py +++ b/python/api_v2/tests/test_concerns.py @@ -491,6 +491,10 @@ def setUp(self) -> None: ) } + # so flag autogen will work + self.provider.state = "changed" + self.provider.save(update_fields=["state"]) + def repr_concerns(self, concerns: Iterable[ConcernItem]) -> str: return "\n".join( f" {i}. {rec}" @@ -887,6 +891,13 @@ def test_concern_removal_with_flag_autogeneration_on_config_change(self) -> None provider_concern = self.provider.get_own_issue(ConcernCause.CONFIG) another_provider_concern = another_provider.get_own_issue(ConcernCause.CONFIG) + # update states, so flag autogeneration will work as expected + Host.objects.all().update(state="something") + HostProvider.objects.all().update(state="something") + Cluster.objects.all().update(state="something") + ClusterObject.objects.all().update(state="something") + ServiceComponent.objects.all().update(state="something") + expected_concerns = {} def _update_expected_concerns(): @@ -1194,6 +1205,8 @@ def test_host_config_issue(self): self.assertIsNotNone(self.provider.get_own_issue(ConcernCause.CONFIG)) # host config issue resolved, provider remains + host.state = "changed" + host.save(update_fields=["state"]) self.change_config_via_api(host) self.assertIsNotNone(self.provider.get_own_issue(ConcernCause.CONFIG)) @@ -1214,6 +1227,8 @@ def 
test_host_config_issue(self): def test_two_hosts_config_issue_one_resolved(self): host_1 = self.add_host_via_api(self.provider, fqdn="host1") + host_1.state = "changed" + host_1.save(update_fields=["state"]) host_2 = self.add_host_via_api(self.provider, fqdn="host2") host_1_config_issue = host_1.get_own_issue(ConcernCause.CONFIG) From 5290dc8a1510b5cf4dcfc215cc46c9f3752d2b8e Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 30 Aug 2024 07:41:32 +0000 Subject: [PATCH 49/98] ADCM-5911 Use `WatchedFileHandler` and separate env variables for detecting loglevels of different loggers --- os/etc/adcm/log.cfg | 131 ------------------ .../{setup/logging.py => custom_loggers.py} | 17 --- python/adcm/settings.py | 105 +++++++++++++- python/adcm/setup/__init__.py | 11 -- 4 files changed, 101 insertions(+), 163 deletions(-) delete mode 100644 os/etc/adcm/log.cfg rename python/adcm/{setup/logging.py => custom_loggers.py} (73%) delete mode 100644 python/adcm/setup/__init__.py diff --git a/os/etc/adcm/log.cfg b/os/etc/adcm/log.cfg deleted file mode 100644 index 23afa6127c..0000000000 --- a/os/etc/adcm/log.cfg +++ /dev/null @@ -1,131 +0,0 @@ -# Logging settings -[loggers] -keys=root, adcm, audit, django, background_tasks, task_runner_err, django_auth_ldap - -[handlers] -keys=adcm_file, adcm_debug_file, background_task_file, ldap_file, audit_file, stream_stderr, stream_stdout, task_runner_err_file - -[formatters] -keys=adcm - -[filters] -keys=requireDebugFalse - -[handler_stream_stdout] -class=logging.StreamHandler -formatter=adcm -kwargs={"stream": sys.stdout} - -[handler_stream_stderr] -class=logging.StreamHandler -formatter=adcm -kwargs={"stream": sys.stdout} - -[handler_adcm_file] -filters=requireDebugFalse -formatter=adcm -class=adcm.setup.logging.LockingTimedRotatingFileHandler -kwargs={ - "filename": "%(LOG_DIR)s/adcm.log", - "when": "midnight", - "backupCount": 5, - } - -[handler_audit_file] -formatter=adcm -class=adcm.setup.logging.LockingTimedRotatingFileHandler 
-kwargs={ - "filename": "%(LOG_DIR)s/audit.log", - "when": "midnight", - "backupCount": 5, - } - - -[handler_adcm_debug_file] -filters=requireDebugFalse -formatter=adcm -class=adcm.setup.logging.LockingTimedRotatingFileHandler -kwargs={ - "filename": "%(LOG_DIR)s/adcm_debug.log", - "when": "midnight", - "backupCount": 5, - } - -[handler_background_task_file] -formatter=adcm -class=adcm.setup.logging.LockingTimedRotatingFileHandler -kwargs={ - "filename": "%(LOG_DIR)s/cron_task.log", - "when": "midnight", - "backupCount": 5, - } - -[handler_task_runner_err_file] -filters=requireDebugFalse -formatter=adcm -class=adcm.setup.logging.LockingTimedRotatingFileHandler -kwargs={ - "filename": "%(LOG_DIR)s/task_runner.err", - "when": "midnight", - "backupCount": 5, - } - -[handler_ldap_file] -class=adcm.setup.logging.LockingTimedRotatingFileHandler -formatter=adcm -kwargs={ - "filename": "%(LOG_DIR)s/ldap.log", - "when": "m", - "backupCount": 5, - } - -[formatter_adcm] -format={asctime} {levelname} {module} {message} -style={ - -[filter_requireDebugFalse] -class=django.utils.log.RequireDebugFalse - -[logger_root] -level=NOTSET -handlers= - -[logger_adcm] -handlers=adcm_file -level=%(LOG_LEVEL)s -propagate=1 -qualname=cm - -[logger_stream_std] -handlers=stream_stdout,stream_stderr -level=%(LOG_LEVEL)s -qualname=cm - -[logger_audit] -handlers=audit_file -level=INFO -propagate=1 -qualname=audit - -[logger_django] -handlers=adcm_debug_file -level=%(LOG_LEVEL)s -propagate=1 -qualname=django - -[logger_background_tasks] -handlers=background_task_file -level=%(LOG_LEVEL)s -propagate=1 -qualname=django.core.management - -[logger_task_runner_err] -handlers=task_runner_err_file -level=%(LOG_LEVEL)s -propagate=1 -qualname=task_runner - -[logger_django_auth_ldap] -qualname=django_auth_ldap -handlers=ldap_file -level=%(LOG_LEVEL)s diff --git a/python/adcm/setup/logging.py b/python/adcm/custom_loggers.py similarity index 73% rename from python/adcm/setup/logging.py rename to 
python/adcm/custom_loggers.py index 9f62da70b9..1f41b3516a 100644 --- a/python/adcm/setup/logging.py +++ b/python/adcm/custom_loggers.py @@ -14,23 +14,6 @@ from pathlib import Path from tempfile import gettempdir import fcntl -import logging - - -def configure_logging_from_file(filepath: Path | str): - if not Path(filepath).is_file(): - print(f"File with loggers configuration is missing at {filepath}") - return - - from django.conf import settings - - logging.config.fileConfig( - filepath, - defaults={ - "LOG_DIR": settings.LOG_DIR, - "LOG_LEVEL": settings.LOG_LEVEL, - }, - ) class LockingTimedRotatingFileHandler(TimedRotatingFileHandler): diff --git a/python/adcm/settings.py b/python/adcm/settings.py index 04957439bc..860c38bd8f 100644 --- a/python/adcm/settings.py +++ b/python/adcm/settings.py @@ -222,10 +222,107 @@ def get_db_options() -> dict: "SECRET_KEY": ADCM_TOKEN, } -LOG_LEVEL = os.getenv("LOG_LEVEL", logging.getLevelName(logging.ERROR)) - -LOGGING_CONFIG = "adcm.setup.logging.configure_logging_from_file" -LOGGING = Path("/etc/adcm/log.cfg") +DEFAULT_LOG_LEVEL = os.getenv("LOG_LEVEL", logging.getLevelName(logging.ERROR)) +DEFAULT_FILE_HANDLER_CLASS = "logging.handlers.WatchedFileHandler" + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "filters": { + "require_debug_false": { + "()": "django.utils.log.RequireDebugFalse", + }, + }, + "formatters": { + "adcm": { + "format": "{asctime} {levelname} {module} {message}", + "style": "{", + }, + }, + "handlers": { + # files + "adcm_file": { + "filters": ["require_debug_false"], + "formatter": "adcm", + "class": DEFAULT_FILE_HANDLER_CLASS, + "filename": LOG_FILE, + }, + "adcm_debug_file": { + "filters": ["require_debug_false"], + "formatter": "adcm", + "class": DEFAULT_FILE_HANDLER_CLASS, + "filename": LOG_DIR / "adcm_debug.log", + }, + "task_runner_err_file": { + "filters": ["require_debug_false"], + "formatter": "adcm", + "class": DEFAULT_FILE_HANDLER_CLASS, + "filename": LOG_DIR / 
"task_runner.err", + }, + "background_task_file_handler": { + "formatter": "adcm", + "class": DEFAULT_FILE_HANDLER_CLASS, + "filename": LOG_DIR / "cron_task.log", + }, + "ldap_file_handler": { + "class": DEFAULT_FILE_HANDLER_CLASS, + "formatter": "adcm", + "filename": LOG_DIR / "ldap.log", + }, + # streams + "stream_stdout_handler": { + "class": "logging.StreamHandler", + "formatter": "adcm", + "stream": "ext://sys.stdout", + }, + "stream_stderr_handler": { + "class": "logging.StreamHandler", + "formatter": "adcm", + "stream": "ext://sys.stderr", + }, + # special + "audit_file_handler": { + "class": DEFAULT_FILE_HANDLER_CLASS, + "filename": LOG_DIR / "audit.log", + }, + }, + "loggers": { + "adcm": { + "handlers": ["adcm_file"], + "level": os.getenv("ADCM_LOG_LEVEL", DEFAULT_LOG_LEVEL), + "propagate": True, + }, + "django": { + "handlers": ["adcm_debug_file"], + "level": os.getenv("ADCM_LOG_LEVEL", DEFAULT_LOG_LEVEL), + "propagate": True, + }, + "background_tasks": { + "handlers": ["background_task_file_handler"], + "level": os.getenv("BACKGROUND_TASKS_LOG_LEVEL", DEFAULT_LOG_LEVEL), + "propagate": True, + }, + "audit": { + "handlers": ["audit_file_handler"], + "level": os.getenv("AUDIT_LOG_LEVEL", logging.getLevelName(logging.INFO)), + "propagate": True, + }, + "task_runner_err": { + "handlers": ["task_runner_err_file"], + "level": os.getenv("TASK_RUNNER_LOG_LEVEL", DEFAULT_LOG_LEVEL), + "propagate": True, + }, + "stream_std": { + "handlers": ["stream_stdout_handler", "stream_stderr_handler"], + "level": DEFAULT_LOG_LEVEL, + }, + "django_auth_ldap": { + "handlers": ["ldap_file_handler"], + "level": os.getenv("LDAP_LOG_LEVEL", DEFAULT_LOG_LEVEL), + "propagate": True, + }, + }, +} DEFAULT_AUTO_FIELD = "django.db.models.AutoField" diff --git a/python/adcm/setup/__init__.py b/python/adcm/setup/__init__.py deleted file mode 100644 index 824dd6c8fe..0000000000 --- a/python/adcm/setup/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -# Licensed under the Apache License, Version 
2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. From bdc79cd51f3f0a5e64feae8f630cd1a00a17d4b1 Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Mon, 2 Sep 2024 12:44:02 +0300 Subject: [PATCH 50/98] ADCM-5919: adjust wsgi.log settings --- os/etc/adcm/adcm.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/os/etc/adcm/adcm.cfg b/os/etc/adcm/adcm.cfg index 641ae486a1..2638fa269b 100644 --- a/os/etc/adcm/adcm.cfg +++ b/os/etc/adcm/adcm.cfg @@ -8,8 +8,8 @@ pidfile=/run/uwsgi.pid socket=/run/adcm.sock chmod-socket=777 max-requests=5000 -maxsize=2000000 log-4xx = true log-5xx = true disable-logging = true -logger = file:/adcm/data/log/wsgi.log +logto = /adcm/data/log/wsgi.log +log-maxsize = 2000000 From 6e6af55a998bf6598b6a0691f78fa727e064e92b Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Thu, 5 Sep 2024 15:49:59 +0300 Subject: [PATCH 51/98] ADCM-5934: Wrong hosts order on mapping page --- python/api_v2/cluster/views.py | 2 +- python/api_v2/tests/test_mapping.py | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index 534ecfe340..48fae19604 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -398,7 +398,7 @@ def mapping(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG00 ) def mapping_hosts(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 cluster = self.get_object() - serializer = 
self.get_serializer(instance=Host.objects.filter(cluster=cluster), many=True) + serializer = self.get_serializer(instance=Host.objects.filter(cluster=cluster).order_by("fqdn"), many=True) return Response(status=HTTP_200_OK, data=serializer.data) diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index a5b5e327c3..9d0fee0022 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -42,13 +42,13 @@ class TestMapping(BaseAPITestCase): def setUp(self) -> None: super().setUp() - self.host_1 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_1") + self.host_1 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_B") self.add_host_to_cluster(cluster=self.cluster_1, host=self.host_1) - self.host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_2") + self.host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_A") self.add_host_to_cluster(cluster=self.cluster_1, host=self.host_2) - self.host_3 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_3") + self.host_3 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_C") self.add_host_to_cluster(cluster=self.cluster_2, host=self.host_3) self.service_1 = self.add_services_to_cluster(service_names=["service_1"], cluster=self.cluster_1).get() @@ -199,8 +199,11 @@ def test_mapping_hosts_success(self): response = self.client.v2[self.cluster_1, "mapping", "hosts"].get() self.assertEqual(response.status_code, HTTP_200_OK) - self.assertEqual(len(response.json()), 2) - self.assertEqual({host["id"] for host in response.json()}, {self.host_1.pk, self.host_2.pk}) + response = response.json() + self.assertEqual(len(response), 2) + self.assertEqual({host["id"] for host in response}, {self.host_1.pk, self.host_2.pk}) + # check sort by fqdn + 
self.assertListEqual([h["name"] for h in response], sorted([self.host_1.fqdn, self.host_2.fqdn])) def test_mapping_components_success(self): response = self.client.v2[self.cluster_1, "mapping", "components"].get() From bd868192ed627bd555baa1a3acfb99b66defa40e Mon Sep 17 00:00:00 2001 From: Pavel Nesterovkiy Date: Thu, 5 Sep 2024 14:47:03 +0000 Subject: [PATCH 52/98] bugfix/ADCM-5883 fix jobLog autoscroll https://tracker.yandex.ru/ADCM-5883 --- .../JobPageChildJobsTable.tsx | 17 +++++++++----- .../JobPage/JobPageLog/JobPageLog.tsx | 23 +++++++------------ .../JobPageLog/useRequestJobLogPage.ts | 12 ++++++---- 3 files changed, 26 insertions(+), 26 deletions(-) diff --git a/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageChildJobsTable/JobPageChildJobsTable.tsx b/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageChildJobsTable/JobPageChildJobsTable.tsx index 1989fcef63..cfeab0badf 100644 --- a/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageChildJobsTable/JobPageChildJobsTable.tsx +++ b/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageChildJobsTable/JobPageChildJobsTable.tsx @@ -71,11 +71,18 @@ const JobPageChildJobsTable = () => { return; } - const nextJob = + let nextJob = task.childJobs.findLast((child) => child.status === AdcmJobStatus.Running) || task.childJobs.find((child) => child.status === AdcmJobStatus.Created); - if (!nextJob) return; + if (!nextJob) { + const lastJobIndex = task.childJobs.indexOf(lastViewedJob); + + if (lastJobIndex === -1 || !task.childJobs[lastJobIndex + 1]) return; + + nextJob = task.childJobs[lastJobIndex + 1]; + } + setLastViewedJobId(nextJob.id); setExpandableRows(new Set([nextJob.id])); }; @@ -109,10 +116,9 @@ const JobPageChildJobsTable = () => { } if (task.status === AdcmJobStatus.Failed) { - const lastFailedJob = task.childJobs.findLast((child) => child.status === AdcmJobStatus.Running); + const lastFailedJob = task.childJobs.findLast((child) => child.status === AdcmJobStatus.Failed); if 
(!lastFailedJob) return; - - changeExpandedRowsState([{ key: lastFailedJob.id, isExpand: true }]); + setExpandableRows(new Set([lastFailedJob.id])); return; } @@ -164,7 +170,6 @@ const JobPageChildJobsTable = () => { isAutoScroll={isAutoScrollState} setIsAutoScroll={setIsAutoScroll} id={job.id} - isStarted={isTaskWasStartedRef?.current} /> } > diff --git a/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/JobPageLog.tsx b/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/JobPageLog.tsx index e99da7e2ba..c34341694e 100644 --- a/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/JobPageLog.tsx +++ b/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/JobPageLog.tsx @@ -1,5 +1,5 @@ -import React, { useState, useMemo, useEffect, RefObject, useRef, MutableRefObject } from 'react'; -import { useStore } from '@hooks'; +import React, { useState, useMemo, useEffect, RefObject, useRef, MutableRefObject, useCallback } from 'react'; +import { useStore, useResizeObserver } from '@hooks'; import { useRequestJobLogPage } from './useRequestJobLogPage'; import JobLog from '@commonComponents/job/JobLog/JobLog'; import JobLogsTabs from '@commonComponents/job/JobLogsTabs/JobLogsTabs'; @@ -13,20 +13,14 @@ const defaultLogs: AdcmJobLogItem[] = []; interface JobPageLogProps { id: number; isLinkEmpty?: boolean; - isStarted?: boolean; isAutoScroll?: boolean; setIsAutoScroll?: (isAutoScroll: boolean) => void; isUserScrollRef?: MutableRefObject; } -const JobPageLog: React.FC = ({ - id, - isAutoScroll = false, - setIsAutoScroll, - isUserScrollRef, - isStarted = false, -}) => { +const JobPageLog: React.FC = ({ id, isAutoScroll = false, setIsAutoScroll, isUserScrollRef }) => { useRequestJobLogPage(id); + const jobRef: RefObject = useRef(null); const childJob = useStore(({ adcm }) => adcm.jobs.task.childJobs.find((job) => job.id === id)); const logs = useStore(({ adcm }) => adcm.jobs.jobLogs[id] ?? 
defaultLogs); @@ -46,8 +40,8 @@ const JobPageLog: React.FC = ({ }; }, []); - useEffect(() => { - if (!isAutoScroll || logs.length === 0 || !isUserScrollRef || !jobRef?.current || !isStarted) return; + const scrollToExpandedLog = useCallback(() => { + if (!isAutoScroll || logs.length === 0 || !isUserScrollRef || !jobRef?.current) return; const parentTr = jobRef.current.closest('tr'); const prevSiblingTr = parentTr?.previousSibling as HTMLDivElement; @@ -55,7 +49,6 @@ const JobPageLog: React.FC = ({ if (!parentTr || !tableContainer || !prevSiblingTr) return; const scrollTopTo = tableContainer.offsetTop + parentTr.offsetTop - (window.innerHeight - parentTr.scrollHeight); - isUserScrollRef.current = false; window.scrollTo({ @@ -63,9 +56,9 @@ const JobPageLog: React.FC = ({ top: scrollTopTo, behavior: 'smooth', }); + }, [isAutoScroll, logs.length, isUserScrollRef, jobRef]); - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [logs, jobRef, jobRef?.current?.scrollHeight]); + useResizeObserver(jobRef, scrollToExpandedLog); useEffect(() => { if (isMinDelayEnded && !isLoadedLogs && logs !== defaultLogs) { diff --git a/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/useRequestJobLogPage.ts b/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/useRequestJobLogPage.ts index 1a1ad4a8b1..0fd2c3370a 100644 --- a/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/useRequestJobLogPage.ts +++ b/adcm-web/app/src/components/pages/JobsPage/JobPage/JobPageLog/useRequestJobLogPage.ts @@ -4,11 +4,13 @@ import { defaultDebounceDelay } from '@constants'; import { AdcmJobStatus } from '@models/adcm'; import { useMemo, useState } from 'react'; +const maxFuseRequestCount = 2; + export const useRequestJobLogPage = (id: number | undefined) => { const dispatch = useDispatch(); const task = useStore(({ adcm }) => adcm.jobs.task); const requestFrequency = useStore(({ adcm }) => adcm.jobsTable.requestFrequency); - const [isLastUpdated, setIsLastUpdated] 
= useState(false); + const [fuseRequestCount, setFuseRequestCount] = useState(0); const debounceGetData = useDebounce(() => { if (!id) return; @@ -16,20 +18,20 @@ export const useRequestJobLogPage = (id: number | undefined) => { }, defaultDebounceDelay); const isNeedUpdate = useMemo(() => { - if (!task.childJobs || isLastUpdated) return false; + if (!task.childJobs || fuseRequestCount >= maxFuseRequestCount) return false; const curJob = task.childJobs.find((job) => job.id == id); if (!curJob) return false; + if (curJob.status === AdcmJobStatus.Created) return true; if (curJob.status !== AdcmJobStatus.Running && task.status !== AdcmJobStatus.Running) { - setIsLastUpdated(true); - return true; + setFuseRequestCount((prevState) => prevState + 1); } return true; - }, [task, isLastUpdated, id]); + }, [task, fuseRequestCount, id]); useRequestTimer(debounceGetData, debounceGetData, isNeedUpdate ? requestFrequency : 0, [id]); }; From ee69a77aac88f888a28f18866d1c1f45f48663dc Mon Sep 17 00:00:00 2001 From: Kirill Fedorenko Date: Thu, 5 Sep 2024 14:47:24 +0000 Subject: [PATCH 53/98] ADCM-5927 [UI] Sort hosts by their states https://tracker.yandex.ru/ADCM-5927 --- .../HostProvidersTable/HostProvidersTable.constants.ts | 1 + .../pages/HostsPage/HostsTable/HostsTable.constants.ts | 1 + .../ClusterHosts/ClusterHostsTable/ClusterHostsTable.constant.ts | 1 + 3 files changed, 3 insertions(+) diff --git a/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.constants.ts b/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.constants.ts index f3e52d8999..18b5c1b069 100644 --- a/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.constants.ts +++ b/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.constants.ts @@ -17,6 +17,7 @@ export const columns: TableColumn[] = [ { label: 'State', name: 'state', + isSortable: true, }, { label: 
'Description', diff --git a/adcm-web/app/src/components/pages/HostsPage/HostsTable/HostsTable.constants.ts b/adcm-web/app/src/components/pages/HostsPage/HostsTable/HostsTable.constants.ts index 5332958817..1770693243 100644 --- a/adcm-web/app/src/components/pages/HostsPage/HostsTable/HostsTable.constants.ts +++ b/adcm-web/app/src/components/pages/HostsPage/HostsTable/HostsTable.constants.ts @@ -10,6 +10,7 @@ export const columns: TableColumn[] = [ { label: 'State', name: 'state', + isSortable: true, }, { label: 'Hostprovider', diff --git a/adcm-web/app/src/components/pages/cluster/ClusterHosts/ClusterHostsTable/ClusterHostsTable.constant.ts b/adcm-web/app/src/components/pages/cluster/ClusterHosts/ClusterHostsTable/ClusterHostsTable.constant.ts index 8860cbb762..c7e0827479 100644 --- a/adcm-web/app/src/components/pages/cluster/ClusterHosts/ClusterHostsTable/ClusterHostsTable.constant.ts +++ b/adcm-web/app/src/components/pages/cluster/ClusterHosts/ClusterHostsTable/ClusterHostsTable.constant.ts @@ -10,6 +10,7 @@ export const columns: TableColumn[] = [ { label: 'State', name: 'state', + isSortable: true, }, { label: 'Hostprovider', From 4ef4b10dabbef78271b647d3cccbbbcb055345a2 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 6 Sep 2024 07:31:35 +0000 Subject: [PATCH 54/98] ADCM-5925 Implement command to gather hosts hardware info --- os/etc/crontabs/root | 1 + .../action_plugins/adcm_add_host_info.py | 158 ++++++++++++++++++ .../cm/collect_statistics/ansible/ansible.cfg | 7 + .../ansible/collect_host_info.yaml | 15 ++ python/cm/collect_statistics/types.py | 26 ++- .../management/commands/gather_host_facts.py | 71 ++++++++ 6 files changed, 277 insertions(+), 1 deletion(-) create mode 100644 python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py create mode 100644 python/cm/collect_statistics/ansible/ansible.cfg create mode 100644 python/cm/collect_statistics/ansible/collect_host_info.yaml create mode 100644 
python/cm/management/commands/gather_host_facts.py diff --git a/os/etc/crontabs/root b/os/etc/crontabs/root index 4c5a86dd69..80a08d4ede 100755 --- a/os/etc/crontabs/root +++ b/os/etc/crontabs/root @@ -5,3 +5,4 @@ 0 10 */1 * * python /adcm/python/manage.py clearaudit */1 * * * * python /adcm/python/manage.py run_ldap_sync 0 0 * * * python /adcm/python/manage.py collect_statistics --mode send +0 22 * * 1 python /adcm/python/manage.py gather_host_facts diff --git a/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py b/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py new file mode 100644 index 0000000000..b4d6c51bd9 --- /dev/null +++ b/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py @@ -0,0 +1,158 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque +from datetime import datetime, timezone +from hashlib import md5 +from typing import NamedTuple +import re +import sys +import json +import traceback + +from ansible.plugins.action import ActionBase +from core.types import HostID + +sys.path.append("/adcm/python") + +import adcm.init_django # noqa: F401, isort:skip + +from cm.collect_statistics.types import HostDeviceFacts, HostFacts, HostOSFacts +from cm.models import HostInfo + +# To parse output of lshw command that'll be like: +# +# H/W path Device Class Description +# ========================================================= +# /0/100/1d/0/0 hwmon3 disk NVMe disk +# /0/100/1d/0/2 /dev/ng0n1 disk NVMe disk +# /0/100/1d/0/1 /dev/nvme0n1 disk NVMe disk +# +# It'll try to catch device and description groups +LSHW_PATTERN = re.compile(r"^/[\d\w/]+\s+(?P[\w\d/]+)\s+[^\s]+\s+(?P.+)", flags=re.MULTILINE) + + +def _extract_disk_info(lshw_out: str) -> dict[str, str]: + """ + Example + + Input: + + H/W path Device Class Description + ========================================================= + /0/100/1d/0/0 hwmon3 disk NVMe disk + /0/100/1d/0/2 /dev/ng0n1 disk NVMe disk + /0/100/1d/0/1 /dev/nvme0n1 disk NVMe disk + + Output: + + {'hwmon3': 'NVMe disk', 'ng0n1': 'NVMe disk', 'nvme0n1': 'NVMe disk'} + >>> + """ + + return { + device.rsplit("/", maxsplit=1)[-1]: description.strip() + for device, description in LSHW_PATTERN.findall(lshw_out) + } + + +class DataToStore(NamedTuple): + facts: HostFacts + hash_value: str + + +class ActionModule(ActionBase): + def run(self, tmp=None, task_vars=None): + super().run(tmp=tmp, task_vars=task_vars) + + hosts_facts = self.prepare_hosts_facts_for_storage(task_vars=task_vars) + self.save_facts(hosts_facts) + + return {"changed": True} + + def prepare_hosts_facts_for_storage(self, task_vars: dict) -> dict[HostID, DataToStore]: + processed_hosts: dict[HostID, DataToStore] = {} + + hostvars = task_vars["hostvars"] + + for host_name in 
task_vars["ansible_play_batch"]: + raw_facts = hostvars[host_name].get("ansible_facts", {}) + + # This means that facts weren't gathered for some reason: + # most likely because it's unreachable + if not raw_facts or not raw_facts.get("_ansible_facts_gathered", False): + print(f"Skipping {host_name} due to empty/absent facts") + continue + + try: + disk_descriptions = _extract_disk_info(hostvars[host_name]["disk_command_out"]) + + host_id = hostvars[host_name]["adcm_hostid"] + + structured_facts = HostFacts( + cpu_vcores=raw_facts["processor_vcpus"], + os=HostOSFacts( + family=raw_facts["os_family"], + distribution=raw_facts["distribution"], + version=raw_facts["distribution_version"], + ), + ram=raw_facts["memtotal_mb"], + devices=[ + HostDeviceFacts( + name=device_name, + removable=device["removable"], + rotational=device["rotational"], + size=device["size"], + description=disk_descriptions.get(device_name, ""), + ) + for device_name, device in raw_facts["devices"].items() + ], + ) + + processed_hosts[host_id] = DataToStore( + facts=structured_facts, + hash_value=md5(json.dumps(structured_facts).encode("utf-8")).hexdigest(), # noqa: S324 + ) + except Exception as e: # noqa: BLE001 + message = ( + f"Failed to prepare devices record for {host_name}: {e}\n" f"Traceback:\n{traceback.format_exc()}\n" + ) + sys.stderr.write(message) + + return processed_hosts + + def save_facts(self, facts: dict[HostID, DataToStore]) -> None: + hosts_with_up_to_date_facts = deque() + + for host_id, hash_ in HostInfo.objects.values_list("host_id", "hash").filter(host_id__in=facts.keys()): + host_info = facts.get(host_id) + if host_info and host_info.hash_value != hash_: + continue + + hosts_with_up_to_date_facts.append(host_id) + + # for each batch to have the same datetime + date = datetime.now(tz=timezone.utc) + + for_update: set[HostID] = set(facts).difference(hosts_with_up_to_date_facts) + + HostInfo.objects.filter(host_id__in=for_update).delete() + + new_hosts_facts = deque() + + 
for host_id in for_update: + host_info = facts[host_id] + new_hosts_facts.append( + HostInfo(date=date, hash=host_info.hash_value, value=host_info.facts, host_id=host_id) + ) + + HostInfo.objects.bulk_create(new_hosts_facts) diff --git a/python/cm/collect_statistics/ansible/ansible.cfg b/python/cm/collect_statistics/ansible/ansible.cfg new file mode 100644 index 0000000000..4bb824b5c2 --- /dev/null +++ b/python/cm/collect_statistics/ansible/ansible.cfg @@ -0,0 +1,7 @@ +[defaults] +deprecation_warnings=False +callback_whitelist=profile_tasks +stdout_callback=yaml +[ssh_connection] +retries=3 +pipelining=True diff --git a/python/cm/collect_statistics/ansible/collect_host_info.yaml b/python/cm/collect_statistics/ansible/collect_host_info.yaml new file mode 100644 index 0000000000..265a7fc9e7 --- /dev/null +++ b/python/cm/collect_statistics/ansible/collect_host_info.yaml @@ -0,0 +1,15 @@ +- name: Gather cluster's hosts hardware information + hosts: all + serial: 20 + ignore_unreachable: true + gather_facts: yes + + tasks: + - shell: + cmd: "lshw -short -C disk" + register: lshw_result + - set_fact: + disk_command_out: lshw_result.stdout + - name: Save hosts info to DB + adcm_add_host_info: + run_once: true diff --git a/python/cm/collect_statistics/types.py b/python/cm/collect_statistics/types.py index 482f5d82c4..a31d0a424f 100644 --- a/python/cm/collect_statistics/types.py +++ b/python/cm/collect_statistics/types.py @@ -12,7 +12,7 @@ from abc import ABC, abstractmethod from pathlib import Path -from typing import Collection, Generic, Protocol, TypeVar +from typing import Collection, Generic, Protocol, TypedDict, TypeVar T = TypeVar("T") @@ -46,3 +46,27 @@ def encode(self, data: T) -> T: @abstractmethod def decode(self, data: T) -> T: pass + + +# Host Facts Section + + +class HostDeviceFacts(TypedDict): + name: str + removable: str + rotational: str + size: str + description: str + + +class HostOSFacts(TypedDict): + distribution: str + version: str + family: str + + 
+class HostFacts(TypedDict): + cpu_vcores: int + os: HostOSFacts + ram: int + devices: list[HostDeviceFacts] diff --git a/python/cm/management/commands/gather_host_facts.py b/python/cm/management/commands/gather_host_facts.py new file mode 100644 index 0000000000..3662466b58 --- /dev/null +++ b/python/cm/management/commands/gather_host_facts.py @@ -0,0 +1,71 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys +import json +import subprocess + +from django.conf import settings +from django.core.management import BaseCommand + +from cm.collect_statistics.gather_hardware_info import get_inventory +from cm.utils import get_env_with_venv_path + + +class Command(BaseCommand): + help = "Gather hardware facts about hosts" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._inventory_dir = settings.DATA_DIR / "tmp" / "gather_host_facts" + self._workdir = settings.CODE_DIR / "collect_statistics" / "ansible" + + def handle(self, *_, **__) -> None: + self._inventory_dir.mkdir(exist_ok=True, parents=True) + + inventory_file = self._inventory_dir / "inventory.json" + + with inventory_file.open(mode="w", encoding="utf-8") as file_: + json.dump(get_inventory(), file_) + + os.chdir(self._workdir) + + ansible_command = [ + "ansible-playbook", + "--vault-password-file", + str(settings.CODE_DIR / "ansible_secret.py"), + "-i", + str(inventory_file), + str(self._workdir / "collect_host_info.yaml"), + ] + + stdout_file = 
self._inventory_dir / "ansible.stdout" + stderr_file = self._inventory_dir / "ansible.stderr" + + with stdout_file.open(mode="w", encoding="utf-8") as stdout, stderr_file.open( + mode="w", encoding="utf-8" + ) as stderr: + ansible_process = subprocess.Popen( + ansible_command, # noqa: S603 + env=get_env_with_venv_path(venv="2.9"), + stdout=stdout, + stderr=stderr, + ) + + exit_code = ansible_process.wait() + + if exit_code != 0: + print(f"Playbook execution failed with exit code {exit_code}") + sys.exit(exit_code) + + print("Hosts hardware information gathered successfully") From ee703cc7bd3900119a22be4efb0b5371b2b20fef Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 6 Sep 2024 07:32:50 +0000 Subject: [PATCH 55/98] ADCM-5922 Unify host-from-group removal approach for Action and Config host groups --- python/api_v2/cluster/utils.py | 56 +++---------------------- python/cm/api.py | 27 +++--------- python/cm/services/action_host_group.py | 4 +- python/cm/services/group_config.py | 6 +++ python/cm/services/host_group_common.py | 51 ++++++++++++++++++++++ 5 files changed, 72 insertions(+), 72 deletions(-) create mode 100644 python/cm/services/host_group_common.py diff --git a/python/api_v2/cluster/utils.py b/python/api_v2/cluster/utils.py index 85d67de430..f36a94aeaa 100644 --- a/python/api_v2/cluster/utils.py +++ b/python/api_v2/cluster/utils.py @@ -11,9 +11,7 @@ # limitations under the License. 
from collections import defaultdict -from functools import reduce from itertools import chain -from operator import or_ from typing import Literal from cm.data_containers import ( @@ -35,10 +33,8 @@ update_issues_and_flags_after_deleting, ) from cm.models import ( - ActionHostGroup, Cluster, ClusterObject, - GroupConfig, Host, HostComponent, MaintenanceMode, @@ -46,14 +42,15 @@ Prototype, ServiceComponent, ) +from cm.services.action_host_group import ActionHostGroupRepo from cm.services.cluster import retrieve_clusters_topology from cm.services.concern.locks import get_lock_on_object +from cm.services.group_config import ConfigHostGroupRepo from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import send_host_component_map_update_event from core.cluster.operations import find_hosts_difference -from core.cluster.types import MovedHosts from django.contrib.contenttypes.models import ContentType -from django.db.models import Q, QuerySet +from django.db.models import QuerySet from django.db.transaction import atomic, on_commit from rbac.models import Policy from rest_framework.status import HTTP_409_CONFLICT @@ -276,8 +273,6 @@ def _save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: for added_host in mapping_data.added_hosts: add_concern_to_object(object_=added_host, concern=lock) - _handle_mapping_config_groups(mapping_data=mapping_data) - mapping_objects: list[HostComponent] = [] for map_ in mapping_data.mapping: mapping_objects.append( @@ -294,9 +289,9 @@ def _save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: updated_topology = next(retrieve_clusters_topology(cluster_ids=(mapping_data.cluster.id,))) - handle_mapping_action_host_groups( - mapping_delta=find_hosts_difference(old_topology=original_topology, new_topology=updated_topology).unmapped - ) + unmapped_hosts = find_hosts_difference(old_topology=original_topology, new_topology=updated_topology).unmapped + 
ActionHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) + ConfigHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) update_hierarchy_issues(obj=mapping_data.orm_objects["cluster"]) for provider_id in {host.provider_id for host in mapping_data.hosts.values()}: @@ -309,45 +304,6 @@ def _save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: return HostComponent.objects.filter(cluster_id=mapping_data.cluster.id) -def _handle_mapping_config_groups(mapping_data: MappingData) -> None: - remaining_host_service = {(diff.host.id, diff.service.id) for diff in mapping_data.mapping_difference["remain"]} - removed_hosts_not_in_mapping = { - mapping_data.orm_objects["hosts"][removed_mapping.host.id] - for removed_mapping in mapping_data.mapping_difference["remove"] - if (removed_mapping.host.id, removed_mapping.service.id) not in remaining_host_service - } - removed_mapping_host_ids = {hc.host.id for hc in mapping_data.mapping_difference["remove"]} - - for group_config in GroupConfig.objects.filter( - object_type__model__in=["clusterobject", "servicecomponent"], - hosts__in=removed_mapping_host_ids, - ).distinct(): - group_config.hosts.remove(*removed_hosts_not_in_mapping) - - -def handle_mapping_action_host_groups(mapping_delta: MovedHosts) -> None: - select_predicates = [] - for service_id, hosts in mapping_delta.services.items(): - select_predicates.append( - Q( - host_id__in=hosts, - actionhostgroup__object_id=service_id, - actionhostgroup__object_type=ContentType.objects.get_for_model(ClusterObject), - ) - ) - for component_id, hosts in mapping_delta.components.items(): - select_predicates.append( - Q( - host_id__in=hosts, - actionhostgroup__object_id=component_id, - actionhostgroup__object_type=ContentType.objects.get_for_model(ServiceComponent), - ) - ) - - if len(select_predicates) > 0: - ActionHostGroup.hosts.through.objects.filter(reduce(or_, select_predicates)).delete() - - def _handle_mapping_policies(mapping_data: 
MappingData) -> None: service_ids_in_mappings: set[int] = set( chain( diff --git a/python/cm/api.py b/python/cm/api.py index 7dd27001b9..14bed2aeec 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -16,7 +16,6 @@ import json from adcm_version import compare_prototype_versions -from api_v2.cluster.utils import handle_mapping_action_host_groups from core.cluster.operations import find_hosts_difference from core.types import CoreObjectDescriptor from django.conf import settings @@ -70,9 +69,11 @@ ServiceComponent, TaskLog, ) +from cm.services.action_host_group import ActionHostGroupRepo from cm.services.cluster import retrieve_clusters_topology from cm.services.concern.flags import BuiltInFlag, raise_flag, update_hierarchy from cm.services.concern.locks import get_lock_on_object +from cm.services.group_config import ConfigHostGroupRepo from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import ( send_config_creation_event, @@ -537,7 +538,7 @@ def save_hc( cluster: Cluster, host_comp_list: list[tuple[ClusterObject, Host, ServiceComponent]] ) -> list[HostComponent]: hc_queryset = HostComponent.objects.filter(cluster=cluster).order_by("id") - service_set = {hc.service for hc in hc_queryset} + service_set = {hc.service for hc in hc_queryset.select_related("service")} old_hosts = {i.host for i in hc_queryset.select_related("host")} new_hosts = {i[1] for i in host_comp_list} @@ -551,22 +552,6 @@ def save_hc( for added_host in new_hosts.difference(old_hosts): add_concern_to_object(object_=added_host, concern=lock) - still_hc = still_existed_hc(cluster, host_comp_list) - host_service_of_still_hc = {(hc.host, hc.service) for hc in still_hc} - - for removed_hc in set(hc_queryset) - set(still_hc): - groupconfigs = GroupConfig.objects.filter( - object_type__model__in=["clusterobject", "servicecomponent"], - hosts=removed_hc.host, - ) - for group_config in groupconfigs: - if (group_config.object_type.model == "clusterobject") and ( - 
(removed_hc.host, removed_hc.service) in host_service_of_still_hc - ): - continue - - group_config.hosts.remove(removed_hc.host) - hc_queryset.delete() host_component_list = [] @@ -581,9 +566,9 @@ def save_hc( host_component_list.append(host_component) updated_topology = next(retrieve_clusters_topology(cluster_ids=(cluster.id,))) - handle_mapping_action_host_groups( - mapping_delta=find_hosts_difference(old_topology=previous_topology, new_topology=updated_topology).unmapped - ) + unmapped_hosts = find_hosts_difference(new_topology=updated_topology, old_topology=previous_topology).unmapped + ActionHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) + ConfigHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) update_hierarchy_issues(cluster) diff --git a/python/cm/services/action_host_group.py b/python/cm/services/action_host_group.py index ed320a1ec7..bddd3b7d17 100644 --- a/python/cm/services/action_host_group.py +++ b/python/cm/services/action_host_group.py @@ -28,6 +28,7 @@ from cm.converters import core_type_to_model, model_name_to_core_type from cm.models import ActionHostGroup, Host, HostComponent, TaskLog +from cm.services.host_group_common import HostGroupRepoMixin ActionHostGroupID: TypeAlias = int @@ -67,8 +68,9 @@ class HostError(ADCMMessageError): ... 
-class ActionHostGroupRepo: +class ActionHostGroupRepo(HostGroupRepoMixin): group_hosts_model = ActionHostGroup.hosts.through + group_hosts_field_name = "actionhostgroup" def create(self, dto: CreateDTO) -> ActionHostGroupID: object_type = ContentType.objects.get_for_model(core_type_to_model(dto.owner.type)) diff --git a/python/cm/services/group_config.py b/python/cm/services/group_config.py index a8113d76c2..0631677522 100644 --- a/python/cm/services/group_config.py +++ b/python/cm/services/group_config.py @@ -17,6 +17,7 @@ from cm.converters import core_type_to_model, model_name_to_core_type from cm.models import GroupConfig +from cm.services.host_group_common import HostGroupRepoMixin GroupConfigName: TypeAlias = str @@ -68,3 +69,8 @@ def retrieve_group_configs_for_hosts( group.hosts.add(ShortObjectInfo(id=record["host_id"], name=record["host_name"])) return result + + +class ConfigHostGroupRepo(HostGroupRepoMixin): + group_hosts_model = GroupConfig.hosts.through + group_hosts_field_name = "groupconfig" diff --git a/python/cm/services/host_group_common.py b/python/cm/services/host_group_common.py new file mode 100644 index 0000000000..028c4a2e84 --- /dev/null +++ b/python/cm/services/host_group_common.py @@ -0,0 +1,51 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from functools import reduce +from operator import or_ +from typing import Iterable + +from core.cluster.types import MovedHosts +from django.contrib.contenttypes.models import ContentType +from django.db.models import Model, Q + +from cm.models import ClusterObject, ServiceComponent + + +class HostGroupRepoMixin: + group_hosts_model: Model + group_hosts_field_name: str + + def remove_unmapped_hosts_from_groups(self, unmapped_hosts: MovedHosts) -> None: + if not (unmapped_hosts.services or unmapped_hosts.components): + return + + hosts_in_service_groups = Q( + Q(**{f"{self.group_hosts_field_name}__object_type": ContentType.objects.get_for_model(ClusterObject)}), + self._combine_with_or( + Q(host_id__in=hosts, **{f"{self.group_hosts_field_name}__object_id": service_id}) + for service_id, hosts in unmapped_hosts.services.items() + ), + ) + + hosts_in_component_groups = Q( + Q(**{f"{self.group_hosts_field_name}__object_type": ContentType.objects.get_for_model(ServiceComponent)}), + self._combine_with_or( + Q(host_id__in=hosts, **{f"{self.group_hosts_field_name}__object_id": component_id}) + for component_id, hosts in unmapped_hosts.components.items() + ), + ) + + self.group_hosts_model.objects.filter(hosts_in_service_groups | hosts_in_component_groups).delete() + + def _combine_with_or(self, clauses: Iterable[Q]) -> Q: + return reduce(or_, clauses, Q()) From 497291a4d096a762213c7a4515f31ad9125e3f52 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 6 Sep 2024 07:35:37 +0000 Subject: [PATCH 56/98] ADCM-5923: inventory for statistics --- .../gather_hardware_info.py | 41 +++++++++++ python/cm/services/job/inventory/__init__.py | 18 +++-- python/cm/services/job/inventory/_base.py | 12 ++++ python/cm/tests/test_management_commands.py | 71 +++++++++++++++++++ 4 files changed, 132 insertions(+), 10 deletions(-) create mode 100644 python/cm/collect_statistics/gather_hardware_info.py diff --git a/python/cm/collect_statistics/gather_hardware_info.py 
b/python/cm/collect_statistics/gather_hardware_info.py new file mode 100644 index 0000000000..3d6c867fb6 --- /dev/null +++ b/python/cm/collect_statistics/gather_hardware_info.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import defaultdict + +from core.types import HostID, HostName +from django.db.models import Value +from django.db.models.functions import Coalesce + +from cm.models import Host +from cm.services.job.inventory import get_basic_info_for_hosts + + +def get_inventory() -> dict: + """ + Collects inventory data for all existing hosts. 
+ Host groups are split by cluster edition (`ADCM` for unlinked hosts) + """ + + host_fqdn_edition: dict[HostID, tuple[HostName, str]] = { + host["id"]: (host["fqdn"], host["edition"]) + for host in Host.objects.select_related("cluster__prototype__bundle") + .values("id", "fqdn", edition=Coalesce("cluster__prototype__bundle__edition", Value("ADCM"))) + .all() + } + + host_groups = defaultdict(lambda: defaultdict(dict)) + for host_id, info in get_basic_info_for_hosts(hosts=set(host_fqdn_edition.keys())).items(): + fqdn, edition = host_fqdn_edition[host_id] + host_groups[edition]["hosts"][fqdn] = info.dict(by_alias=True, exclude_defaults=True) + + return {"all": {"children": host_groups}} diff --git a/python/cm/services/job/inventory/__init__.py b/python/cm/services/job/inventory/__init__.py index 8a08fce9ea..043138aac1 100644 --- a/python/cm/services/job/inventory/__init__.py +++ b/python/cm/services/job/inventory/__init__.py @@ -10,10 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from cm.services.job.inventory._base import ( - get_cluster_vars, - get_inventory_data, -) +from cm.services.job.inventory._base import get_basic_info_for_hosts, get_cluster_vars, get_inventory_data from cm.services.job.inventory._config import get_adcm_configuration, get_objects_configurations from cm.services.job.inventory._groups import detect_host_groups_for_cluster_bundle_action from cm.services.job.inventory._imports import get_imports_for_inventory @@ -27,16 +24,17 @@ ) __all__ = [ - "ClusterVars", "ClusterNode", - "ServiceNode", + "ClusterVars", + "ComponentNode", "HostNode", "HostProviderNode", - "ComponentNode", - "get_cluster_vars", - "get_inventory_data", - "get_imports_for_inventory", + "ServiceNode", "detect_host_groups_for_cluster_bundle_action", "get_adcm_configuration", + "get_basic_info_for_hosts", + "get_cluster_vars", + "get_imports_for_inventory", + "get_inventory_data", "get_objects_configurations", ] diff --git a/python/cm/services/job/inventory/_base.py b/python/cm/services/job/inventory/_base.py index 748d5d8e4d..a66588a8df 100644 --- a/python/cm/services/job/inventory/_base.py +++ b/python/cm/services/job/inventory/_base.py @@ -390,3 +390,15 @@ def _get_objects_basic_info( } return result + + +def get_basic_info_for_hosts(hosts: set[HostID]) -> dict[HostID, HostNode]: + objects_in_inventory = {ADCMCoreType.HOST: hosts} + hosts_info = _get_objects_basic_info( + objects_in_inventory=objects_in_inventory, + objects_configuration=get_objects_configurations(objects_in_inventory), + objects_before_upgrade={}, + objects_maintenance_mode=MaintenanceModeOfObjects(services={}, components={}, hosts={}), + ) + + return {host_id: host_node for (_, host_id), host_node in hosts_info.items()} diff --git a/python/cm/tests/test_management_commands.py b/python/cm/tests/test_management_commands.py index 99d4af9303..5a1e5bf327 100644 --- a/python/cm/tests/test_management_commands.py +++ b/python/cm/tests/test_management_commands.py @@ -22,6 +22,7 @@ from 
adcm.tests.base import BaseTestCase, BusinessLogicMixin from api_v2.tests.base import BaseAPITestCase, ParallelReadyTestCase +from core.types import ADCMCoreType from django.conf import settings from django.db.models import Q from django.test import TestCase @@ -33,9 +34,11 @@ from cm.collect_statistics.collectors import BundleCollector from cm.collect_statistics.encoders import TarFileEncoder from cm.collect_statistics.errors import RetriesExceededError, SenderConnectionError +from cm.collect_statistics.gather_hardware_info import get_inventory from cm.collect_statistics.senders import SenderSettings, StatisticSender from cm.collect_statistics.storages import JSONFile, StorageError, TarFileWithJSONFileStorage from cm.models import ADCM, Bundle, ServiceComponent +from cm.services.job.inventory import get_objects_configurations from cm.tests.utils import gen_cluster, gen_provider @@ -258,6 +261,74 @@ def test_collect_community_bundle_collector(self) -> None: self.assertDictEqual(actual, expected) + def test_inventory(self): + # prepare data + bundle_community = self.add_bundle(self.bundles_dir / "cluster_1") + bundle_enterprise = self.add_bundle(self.bundles_dir / "cluster_full_config") + bundle_enterprise.edition = "enterprise" + bundle_enterprise.save(update_fields=["edition"]) + bundle_provider = self.add_bundle(self.bundles_dir / "provider") + + cluster_community = self.add_cluster(bundle=bundle_community, name="Cluster community") + cluster_enterprise = self.add_cluster(bundle=bundle_enterprise, name="Cluster enterprise") + provider = self.add_provider(bundle=bundle_provider, name="Provider") + + h1_free = self.add_host(provider=provider, fqdn="H1 free") + h2_community = self.add_host(provider=provider, fqdn="H2 community", cluster=cluster_community) + h3_enterprise = self.add_host(provider=provider, fqdn="H3 enterprise", cluster=cluster_enterprise) + h4_enterprise = self.add_host(provider=provider, fqdn="H4 enterprise", cluster=cluster_enterprise) + + configs 
= get_objects_configurations( + objects={ADCMCoreType.HOST: {h1_free.id, h2_community.id, h3_enterprise.id, h4_enterprise.id}} + ) + + # test + expected_inventory = { + "all": { + "children": { + "ADCM": { + "hosts": { + h1_free.fqdn: { + "adcm_hostid": h1_free.id, + "state": h1_free.state, + "multi_state": h1_free.multi_state, + **configs[ADCMCoreType.HOST, h1_free.id], + } + } + }, + "community": { + "hosts": { + h2_community.fqdn: { + "adcm_hostid": h2_community.id, + "state": h2_community.state, + "multi_state": h2_community.multi_state, + **configs[ADCMCoreType.HOST, h2_community.id], + } + } + }, + "enterprise": { + "hosts": { + h3_enterprise.fqdn: { + "adcm_hostid": h3_enterprise.id, + "state": h3_enterprise.state, + "multi_state": h3_enterprise.multi_state, + **configs[ADCMCoreType.HOST, h3_enterprise.id], + }, + h4_enterprise.fqdn: { + "adcm_hostid": h4_enterprise.id, + "state": h4_enterprise.state, + "multi_state": h4_enterprise.multi_state, + **configs[ADCMCoreType.HOST, h4_enterprise.id], + }, + } + }, + } + } + } + actual_inventory = get_inventory() + + self.assertDictEqual(actual_inventory, expected_inventory) + class TestStorage(BaseAPITestCase): def setUp(self) -> None: From c291a920039fe62ee68e6de63976055b4f317af9 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 6 Sep 2024 07:48:48 +0000 Subject: [PATCH 57/98] ADCM-5918: Add static type checker Pyright for ADCM --- Makefile | 1 + poetry.lock | 126 +++++++++++++++++++++++- pyproject.toml | 55 +++++++++++ python/api_v2/service/utils.py | 6 +- python/cm/api.py | 2 +- python/cm/models.py | 12 +-- python/cm/services/job/jinja_scripts.py | 6 -- python/cm/utils.py | 5 +- 8 files changed, 186 insertions(+), 27 deletions(-) diff --git a/Makefile b/Makefile index 50c4136250..209922ab3d 100644 --- a/Makefile +++ b/Makefile @@ -45,6 +45,7 @@ lint: poetry install --no-root --with lint poetry run ruff check $(PY_FILES) poetry run ruff format --check $(PY_FILES) + poetry run pyright --project 
pyproject.toml poetry run python dev/linters/license_checker.py --folders $(PY_FILES) go poetry run python dev/linters/migrations_checker.py python diff --git a/poetry.lock b/poetry.lock index 9ccc232a30..f291ed9cee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "adcm-version" @@ -546,6 +546,45 @@ Django = ">=3.2" gprof2dot = ">=2017.09.19" sqlparse = "*" +[[package]] +name = "django-stubs" +version = "5.0.4" +description = "Mypy stubs for Django" +optional = false +python-versions = ">=3.8" +files = [ + {file = "django_stubs-5.0.4-py3-none-any.whl", hash = "sha256:c2502f5ecbae50c68f9a86d52b5b2447d8648fd205036dad0ccb41e19a445927"}, + {file = "django_stubs-5.0.4.tar.gz", hash = "sha256:78e3764488fdfd2695f12502136548ec22f8d4b1780541a835042b8238d11514"}, +] + +[package.dependencies] +asgiref = "*" +django = "*" +django-stubs-ext = ">=5.0.4" +tomli = {version = "*", markers = "python_version < \"3.11\""} +types-PyYAML = "*" +typing-extensions = ">=4.11.0" + +[package.extras] +compatible-mypy = ["mypy (>=1.11.0,<1.12.0)"] +oracle = ["oracledb"] +redis = ["redis"] + +[[package]] +name = "django-stubs-ext" +version = "5.0.4" +description = "Monkey-patching and extensions for django-stubs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "django_stubs_ext-5.0.4-py3-none-any.whl", hash = "sha256:910cbaff3d1e8e806a5c27d5ddd4088535aae8371ea921b7fd680fdfa5f14e30"}, + {file = "django_stubs_ext-5.0.4.tar.gz", hash = "sha256:85da065224204774208be29c7d02b4482d5a69218a728465c2fbe41725fdc819"}, +] + +[package.dependencies] +django = "*" +typing-extensions = "*" + [[package]] name = "django-test-migrations" version = "1.3.0" @@ -585,6 +624,29 @@ files = [ {file = "djangorestframework-camel-case-1.4.2.tar.gz", hash = 
"sha256:cdae75846648abb6585c7470639a1d2fb064dc45f8e8b62aaa50be7f1a7a61f4"}, ] +[[package]] +name = "djangorestframework-stubs" +version = "3.15.0" +description = "PEP-484 stubs for django-rest-framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "djangorestframework_stubs-3.15.0-py3-none-any.whl", hash = "sha256:6c634f16fe1f9b1654cfd921eca64cd4188ce8534ab5e3ec7e44aaa0ca969d93"}, + {file = "djangorestframework_stubs-3.15.0.tar.gz", hash = "sha256:f60ee1c80abb01a77acc0169969e07c45c2739ae64667b9a0dd4a2e32697dcab"}, +] + +[package.dependencies] +django-stubs = ">=5.0.0" +requests = ">=2.0.0" +types-PyYAML = ">=5.4.3" +types-requests = ">=0.1.12" +typing-extensions = ">=3.10.0" + +[package.extras] +compatible-mypy = ["django-stubs[compatible-mypy]", "mypy (>=1.10.0,<1.11.0)"] +coreapi = ["coreapi (>=2.0.0)"] +markdown = ["types-Markdown (>=0.1.5)"] + [[package]] name = "drf-extensions" version = "0.7.1" @@ -1441,6 +1503,24 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pyright" +version = "1.1.378" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.378-py3-none-any.whl", hash = "sha256:8853776138b01bc284da07ac481235be7cc89d3176b073d2dba73636cb95be79"}, + {file = "pyright-1.1.378.tar.gz", hash = "sha256:78a043be2876d12d0af101d667e92c7734f3ebb9db71dccc2c220e7e7eb89ca2"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" + +[package.extras] +all = ["twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] + [[package]] name = "python-gnupg" version = "0.5.2" @@ -1507,6 +1587,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1514,8 +1595,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1532,6 +1621,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1539,6 +1629,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1881,15 +1972,40 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240808" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240808.tar.gz", hash = "sha256:b8f76ddbd7f65440a8bda5526a9607e4c7a322dc2f8e1a8c405644f9a6f4b9af"}, + {file = 
"types_PyYAML-6.0.12.20240808-py3-none-any.whl", hash = "sha256:deda34c5c655265fc517b546c902aa6eed2ef8d3e921e4765fe606fe2afe8d35"}, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20240712" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20240712.tar.gz", hash = "sha256:90c079ff05e549f6bf50e02e910210b98b8ff1ebdd18e19c873cd237737c1358"}, + {file = "types_requests-2.32.0.20240712-py3-none-any.whl", hash = "sha256:f754283e152c752e46e70942fa2a146b5bc70393522257bb85bd1ef7e019dcc3"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -1964,4 +2080,4 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "49bdab2c71e90a5eaba1d87f8d6d196bc806cbda886a4678f1d42454133cd1bd" +content-hash = "3f6d3864c6ada4a37bc2b9907a3f39dbb8859bd550a1a61ec449cc6c0fa81630" diff --git a/pyproject.toml b/pyproject.toml index 39586e1b72..20b85d5ea3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,9 @@ optional = true [tool.poetry.group.lint.dependencies] ruff = "^0.1.13" +pyright = "^1.1.378" +django-stubs = "^5.0.4" +djangorestframework-stubs = 
"^3.15.0" [tool.poetry.group.unittests] optional = true @@ -125,3 +128,55 @@ select = [ "DTZ", "ICN", "PIE", "Q", "RET", "SIM", "ARG", "PTH", "PLE", "TRY" ] + +[tool.pyright] +include = [ + "python", + "dev/linters", + "conf/adcm/python_scripts", +] +exclude = [ + "python/api", + "python/api_ui", + "python/audit/cases", + "python/rbac/endpoints", + "python/rbac/endpoints_ui", + "python/rbac/services", + "**/tests", + "**/migrations", + "**/__pycache__", +] + +typeCheckingMode = "standard" +reportUnnecessaryTypeIgnoreComment = true + +reportMissingImports = "error" +reportMissingTypeStubs = false + +pythonVersion = "3.10" +pythonPlatform = "Linux" + +executionEnvironments = [ + { root = "conf/adcm/python_scripts", extraPaths = [ "python" ]}, +] + +# TODO: Remove. Temporarily disable type issue checks +reportAttributeAccessIssue = false +reportReturnType = false +reportOptionalMemberAccess = false +reportArgumentType = false +reportIncompatibleMethodOverride = false +reportOptionalSubscript = false +reportAssignmentType = false +reportIncompatibleVariableOverride = false +reportCallIssue = false +reportOptionalIterable = false +reportInvalidTypeForm = false +reportOptionalCall = false +reportInvalidTypeArguments = false +reportPossiblyUnboundVariable = false +reportInvalidTypeVarUse = false +reportGeneralTypeIssues = false +reportIndexIssue = false +reportOperatorIssue = false +# END Remove. 
diff --git a/python/api_v2/service/utils.py b/python/api_v2/service/utils.py index 976f70a1db..aa94e4c8eb 100644 --- a/python/api_v2/service/utils.py +++ b/python/api_v2/service/utils.py @@ -69,7 +69,7 @@ def bulk_init_config(objects: QuerySet[ADCMEntity]) -> None: {', '.join(['(0, 0)'] * objects.count())} RETURNING id;""" ) object_config_ids = [item[0] for item in cursor.fetchall()] - object_configs: QuerySet[ObjectConfig] = ObjectConfig.objects.filter(pk__in=object_config_ids) + object_configs = ObjectConfig.objects.filter(pk__in=object_config_ids) obj_proto_conf_map = {} objects_to_update = [] @@ -79,7 +79,7 @@ def bulk_init_config(objects: QuerySet[ADCMEntity]) -> None: objects_to_update.append(obj) objects.model.objects.bulk_update(objs=objects_to_update, fields=["config"]) - config_logs: list[ConfigLog] = [] + config_logs = [] for obj_conf in object_configs: obj = obj_conf.object spec, _, config, attr = obj_proto_conf_map[obj.pk] @@ -87,7 +87,7 @@ def bulk_init_config(objects: QuerySet[ADCMEntity]) -> None: process_file_type(obj=obj, spec=spec, conf=config) ConfigLog.objects.bulk_create(objs=config_logs) - config_logs: QuerySet[ConfigLog] = ( + config_logs = ( ConfigLog.objects.filter(obj_ref__in=object_configs) .order_by("-pk") .select_related("obj_ref")[: len(config_logs)] diff --git a/python/cm/api.py b/python/cm/api.py index 8011967e49..0b2eb9e413 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -377,7 +377,7 @@ def update_obj_config(obj_conf: ObjectConfig, config: dict, attr: dict, descript message = f"Both `config` and `attr` should be of `dict` type, not {type(config)} and {type(attr)} respectively" raise TypeError(message) - obj: MainObject | ADCM | GroupConfig = obj_conf.object + obj = obj_conf.object if obj is None: message = "Can't update configuration that have no linked object" raise ValueError(message) diff --git a/python/cm/models.py b/python/cm/models.py index c6bf298072..a3f6aa6405 100644 --- a/python/cm/models.py +++ 
b/python/cm/models.py @@ -1135,15 +1135,11 @@ def get_start_impossible_reason(self, obj: ADCMEntity | ActionHostGroup) -> str obj = obj.object if obj.prototype.type == "adcm": - obj: ADCM - current_configlog = ConfigLog.objects.get(obj_ref=obj.config, id=obj.config.current) if not current_configlog.attr["ldap_integration"]["active"]: return NO_LDAP_SETTINGS if obj.prototype.type == "cluster": - obj: Cluster - if not self.allow_in_maintenance_mode: if Host.objects.filter(cluster=obj, maintenance_mode=MaintenanceMode.ON).exists(): return MANY_HOSTS_IN_MM @@ -1160,8 +1156,6 @@ def get_start_impossible_reason(self, obj: ADCMEntity | ActionHostGroup) -> str return COMPONENT_IN_MM elif obj.prototype.type == "service": - obj: ClusterObject - if not self.allow_in_maintenance_mode: if obj.maintenance_mode == MaintenanceMode.ON: return SERVICE_IN_MM @@ -1180,8 +1174,6 @@ def get_start_impossible_reason(self, obj: ADCMEntity | ActionHostGroup) -> str return MANY_HOSTS_IN_MM elif obj.prototype.type == "component": - obj: ServiceComponent - if not self.allow_in_maintenance_mode: if obj.maintenance_mode == MaintenanceMode.ON: return COMPONENT_IN_MM @@ -1194,9 +1186,7 @@ def get_start_impossible_reason(self, obj: ADCMEntity | ActionHostGroup) -> str ).exists(): return MANY_HOSTS_IN_MM - elif obj.prototype.type == "host": - obj: Host - + elif obj.prototype.type == "host": # noqa: SIM102 if not self.allow_in_maintenance_mode and obj.maintenance_mode == MaintenanceMode.ON: return HOST_IN_MM diff --git a/python/cm/services/job/jinja_scripts.py b/python/cm/services/job/jinja_scripts.py index cd46a52639..4af6879988 100755 --- a/python/cm/services/job/jinja_scripts.py +++ b/python/cm/services/job/jinja_scripts.py @@ -148,9 +148,3 @@ def get_action_info(action: Action) -> ActionContext: owner_group = owner_prototype.type.upper() return ActionContext(name=action.name, owner_group=owner_group) - - -def _get_host_group_names_only( - host_groups: dict[HostGroupName, set[tuple[HostID, 
HostName]]], -) -> dict[HostGroupName, list[HostName]]: - return {group_name: [host_tuple[1] for host_tuple in group_data] for group_name, group_data in host_groups.items()} diff --git a/python/cm/utils.py b/python/cm/utils.py index f39e6c18d8..bdff7b4c1e 100644 --- a/python/cm/utils.py +++ b/python/cm/utils.py @@ -12,9 +12,12 @@ from collections.abc import Mapping from copy import deepcopy -from typing import Any, Iterable, Protocol, TypeVar +from typing import TYPE_CHECKING, Any, Iterable, Protocol, TypeVar import os +if TYPE_CHECKING: + from cm.models import ADCMEntity + ANY = "any" AVAILABLE = "available" MASKING = "masking" From 5c1057d6a7e727e6232f9cf10b5e1661e8e474ee Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 6 Sep 2024 07:54:42 +0000 Subject: [PATCH 58/98] ADCM-5902: Forbid to upload the same bundle twice --- python/adcm/tests/base.py | 4 +- python/api_v2/tests/test_audit/test_bundle.py | 6 +- python/api_v2/tests/test_bundle.py | 60 +++++++++++++------ python/cm/bundle.py | 38 +++++++++++- 4 files changed, 82 insertions(+), 26 deletions(-) diff --git a/python/adcm/tests/base.py b/python/adcm/tests/base.py index 04cbaeaaa4..d11b325c95 100644 --- a/python/adcm/tests/base.py +++ b/python/adcm/tests/base.py @@ -109,9 +109,9 @@ def _prepare_temporal_directories_for_adcm() -> dict: class BundleLogicMixin: @staticmethod - def prepare_bundle_file(source_dir: Path) -> str: + def prepare_bundle_file(source_dir: Path, target_dir: Path | None = None) -> str: bundle_file = f"{source_dir.name}.tar" - with tarfile.open(settings.DOWNLOAD_DIR / bundle_file, "w") as tar: + with tarfile.open((target_dir or settings.DOWNLOAD_DIR) / bundle_file, "w") as tar: for file in source_dir.iterdir(): tar.add(name=file, arcname=file.name) diff --git a/python/api_v2/tests/test_audit/test_bundle.py b/python/api_v2/tests/test_audit/test_bundle.py index 624fa280a0..b54e350272 100644 --- a/python/api_v2/tests/test_audit/test_bundle.py +++ 
b/python/api_v2/tests/test_audit/test_bundle.py @@ -33,9 +33,11 @@ def setUp(self) -> None: self.test_user = self.create_user(**self.test_user_credentials) def test_audit_upload_success(self): - new_bundle_file = self.prepare_bundle_file(source_dir=self.test_bundles_dir / "cluster_one") + new_bundle_file = self.prepare_bundle_file( + source_dir=self.test_bundles_dir / "cluster_one", target_dir=settings.TMP_DIR + ) - with open(settings.DOWNLOAD_DIR / new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(response.status_code, HTTP_201_CREATED) diff --git a/python/api_v2/tests/test_bundle.py b/python/api_v2/tests/test_bundle.py index 363c64e949..53edb9791b 100644 --- a/python/api_v2/tests/test_bundle.py +++ b/python/api_v2/tests/test_bundle.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ from cm.models import Action, Bundle from django.conf import settings from rest_framework.status import ( @@ -33,7 +34,8 @@ def setUp(self) -> None: self.bundle_1 = self.add_bundle(source_dir=cluster_bundle_1_path) cluster_new_bundle_path = self.test_bundles_dir / "cluster_two" - self.new_bundle_file = self.prepare_bundle_file(source_dir=cluster_new_bundle_path) + + self.new_bundle_file = self.prepare_bundle_file(source_dir=cluster_new_bundle_path, target_dir=settings.TMP_DIR) same_names_bundle_path = self.test_bundles_dir / "cluster_identical_cluster_and_service_names" self.same_names_bundle = self.add_bundle(source_dir=same_names_bundle_path) @@ -45,22 +47,31 @@ def test_list_success(self): self.assertEqual(response.json()["count"], 2) def test_upload_success(self): - with open(settings.DOWNLOAD_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(Bundle.objects.filter(name="cluster_two").exists(), True) self.assertEqual(response.status_code, HTTP_201_CREATED) def test_upload_duplicate_fail(self): - with open(settings.DOWNLOAD_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: - with open(settings.DOWNLOAD_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f_duplicate: + with open(settings.TMP_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f_duplicate: (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") response = (self.client.v2 / "bundles").post(data={"file": f_duplicate}, format_="multipart") self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertDictEqual( + response.json(), + { + "code": "BUNDLE_CONFLICT", + "desc": "Bundle with the same content is already " + f"uploaded 
{settings.DOWNLOAD_DIR / self.new_bundle_file}", + "level": "error", + }, + ) def test_upload_fail(self): - with open(settings.DOWNLOAD_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / self.new_bundle_file, encoding=settings.ENCODING_UTF_8) as f: f.readlines() response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") @@ -133,27 +144,33 @@ def test_upload_no_required_component_fail(self): self.assertEqual(Bundle.objects.count(), initial_bundles_count) def test_upload_adcm_min_old_version_success(self): - bundle_file = self.prepare_bundle_file(source_dir=self.test_bundles_dir / "adcm_min_version" / "old") + bundle_file = self.prepare_bundle_file( + source_dir=self.test_bundles_dir / "adcm_min_version" / "old", target_dir=settings.TMP_DIR + ) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(Bundle.objects.filter(name="cluster_adcm_min_version").exists(), True) self.assertEqual(response.status_code, HTTP_201_CREATED) def test_upload_adcm_min_version_success(self): - bundle_file = self.prepare_bundle_file(source_dir=self.test_bundles_dir / "adcm_min_version" / "new" / "older") + bundle_file = self.prepare_bundle_file( + source_dir=self.test_bundles_dir / "adcm_min_version" / "new" / "older", target_dir=settings.TMP_DIR + ) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(Bundle.objects.filter(name="cluster_adcm_min_version").exists(), True) self.assertEqual(response.status_code, HTTP_201_CREATED) def test_upload_adcm_min_version_fail(self): - bundle_file 
= self.prepare_bundle_file(source_dir=self.test_bundles_dir / "adcm_min_version" / "new" / "newer") + bundle_file = self.prepare_bundle_file( + source_dir=self.test_bundles_dir / "adcm_min_version" / "new" / "newer", target_dir=settings.TMP_DIR + ) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -167,9 +184,11 @@ def test_upload_adcm_min_version_fail(self): ) def test_upload_adcm_min_version_multiple_fail(self): - bundle_file = self.prepare_bundle_file(source_dir=self.test_bundles_dir / "adcm_min_version" / "multiple") + bundle_file = self.prepare_bundle_file( + source_dir=self.test_bundles_dir / "adcm_min_version" / "multiple", target_dir=settings.TMP_DIR + ) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -184,10 +203,11 @@ def test_upload_adcm_min_version_multiple_fail(self): def test_upload_plain_scripts_and_scripts_jinja_fail(self): bundle_file = self.prepare_bundle_file( - source_dir=self.test_bundles_dir / "invalid_bundles" / "plain_scripts_and_scripts_jinja" + source_dir=self.test_bundles_dir / "invalid_bundles" / "plain_scripts_and_scripts_jinja", + target_dir=settings.TMP_DIR, ) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -197,10 +217,10 @@ def 
test_upload_plain_scripts_and_scripts_jinja_fail(self): def test_upload_scripts_jinja_in_job_fail(self): bundle_file = self.prepare_bundle_file( - source_dir=self.test_bundles_dir / "invalid_bundles" / "scripts_jinja_in_job" + source_dir=self.test_bundles_dir / "invalid_bundles" / "scripts_jinja_in_job", target_dir=settings.TMP_DIR ) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(response.status_code, HTTP_409_CONFLICT) @@ -208,11 +228,13 @@ def test_upload_scripts_jinja_in_job_fail(self): self.assertIn('Map key "scripts_jinja" is not allowed here', response.data["desc"]) def test_upload_scripts_jinja_success(self): - bundle_file = self.prepare_bundle_file(source_dir=self.test_bundles_dir / "actions_with_scripts_jinja") + bundle_file = self.prepare_bundle_file( + source_dir=self.test_bundles_dir / "actions_with_scripts_jinja", target_dir=settings.TMP_DIR + ) self.assertEqual(Action.objects.filter(scripts_jinja="").count(), Action.objects.count()) - with open(settings.DOWNLOAD_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: + with open(settings.TMP_DIR / bundle_file, encoding=settings.ENCODING_UTF_8) as f: response = (self.client.v2 / "bundles").post(data={"file": f}, format_="multipart") self.assertEqual(response.status_code, HTTP_201_CREATED) diff --git a/python/cm/bundle.py b/python/cm/bundle.py index fd2197796e..5a7f543d4b 100644 --- a/python/cm/bundle.py +++ b/python/cm/bundle.py @@ -13,7 +13,9 @@ from collections import defaultdict from collections.abc import Iterable from pathlib import Path +from tempfile import gettempdir import os +import fcntl import shutil import hashlib import tarfile @@ -245,12 +247,42 @@ def get_verification_status(bundle_archive: Path | None, signature_file: Path | def upload_file(file) -> Path: - 
file_path = settings.DOWNLOAD_DIR / file.name - with file_path.open(mode="wb+") as f: + # save to tempdir + tmp_path = Path(gettempdir(), file.name) + with tmp_path.open(mode="wb+") as f: for chunk in file.chunks(): f.write(chunk) + hash_ = get_hash_safe(path=str(tmp_path)) - return file_path + with Path(gettempdir(), "upload.lock").open(mode="w") as lock: + try: + # consistently check hash duplicates in DOWNLOAD_DIR + fcntl.flock(lock.fileno(), fcntl.LOCK_EX) + + if duplicate_path := _get_file_hashes(path=settings.DOWNLOAD_DIR).get(hash_): + tmp_path.unlink() + raise AdcmEx( + code="BUNDLE_CONFLICT", msg=f"Bundle with the same content is already uploaded {duplicate_path}" + ) + + # move to downloads + new_path = settings.DOWNLOAD_DIR / file.name + shutil.move(src=tmp_path, dst=new_path) + + return new_path + + finally: + fcntl.flock(lock.fileno(), fcntl.LOCK_UN) + + +def _get_file_hashes(path: Path) -> dict[str, Path]: + result = {} + for entry in path.iterdir(): + if not entry.is_file(): + continue + result[get_hash(bundle_file=str(entry))] = entry + + return result def update_bundle(bundle): From 204643c3f7516e7744e1249113d82b776e725f5c Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Fri, 6 Sep 2024 08:47:42 +0000 Subject: [PATCH 59/98] ADCM-5917: Prepare model for save host information --- python/cm/collect_statistics/collectors.py | 58 ++++++++-- .../cm/migrations/0129_auto_20240904_1045.py | 43 ++++++++ python/cm/models.py | 13 +++ python/cm/tests/test_management_commands.py | 103 +++++++++++++++++- 4 files changed, 205 insertions(+), 12 deletions(-) create mode 100644 python/cm/migrations/0129_auto_20240904_1045.py diff --git a/python/cm/collect_statistics/collectors.py b/python/cm/collect_statistics/collectors.py index c3883be269..0a75072863 100644 --- a/python/cm/collect_statistics/collectors.py +++ b/python/cm/collect_statistics/collectors.py @@ -12,14 +12,16 @@ from collections import defaultdict from hashlib import md5 -from typing import 
Collection, Literal +from itertools import chain +from typing import Callable, Collection, Literal from django.db.models import Count, F, Q from pydantic import BaseModel from rbac.models import Policy, Role, User from typing_extensions import TypedDict -from cm.models import Bundle, Cluster, HostComponent, HostProvider +from cm.collect_statistics.types import HostDeviceFacts, HostFacts, HostOSFacts +from cm.models import Bundle, Cluster, HostComponent, HostInfo, HostProvider class BundleData(TypedDict): @@ -40,6 +42,7 @@ class ClusterData(TypedDict): host_count: int bundle: dict host_component_map: list[dict] + hosts: list[dict] class HostProviderData(TypedDict): @@ -69,6 +72,20 @@ class RBACEntities(BaseModel): roles: list[RoleData] +def _get_hosts_by_edition(data: ADCMEntities, edition: Literal["community", "enterprise"]) -> list[dict]: + return list( + chain.from_iterable(cluster["hosts"] for cluster in data.clusters if cluster["bundle"]["edition"] == edition) + ) + + +def map_community_bundle_data(data: ADCMEntities) -> ADCMEntities: + community_hosts = _get_hosts_by_edition(data=data, edition="community") + for host in community_hosts: + host["info"]["os"] = HostOSFacts(**{k: v for k, v in host["info"]["os"].items() if k == "family"}) + + return data + + class RBACCollector: def __init__(self, date_format: str): self._date_format = date_format @@ -89,11 +106,17 @@ def __call__(self) -> RBACEntities: class BundleCollector: - __slots__ = ("_date_format", "_filters") - - def __init__(self, date_format: str, filters: Collection[Q]): + __slots__ = ("_date_format", "_filters", "_mapper") + + def __init__( + self, + date_format: str, + filters: Collection[Q] = (), + mapper: Callable[[ADCMEntities], ADCMEntities] = lambda x: x, + ): self._date_format = date_format self._filters = filters + self._mapper = mapper def __call__(self) -> ADCMEntities: bundles: dict[int, BundleData] = { @@ -129,18 +152,35 @@ def __call__(self) -> ADCMEntities: ) ) + host_data = 
defaultdict(list) + for host_info in HostInfo.objects.select_related("host").filter( + host__cluster_id__in=cluster_general_info.keys() + ): + device_facts = [HostDeviceFacts(**device) for device in host_info.value["devices"]] + host_facts = HostFacts( + cpu_vcores=host_info.value["cpu_vcores"], + os=host_info.value["os"], + ram=host_info.value["ram"], + devices=device_facts, + ) + + host_data[host_info.host.cluster_id].append({"name": host_info.host.fqdn, "info": host_facts}) + clusters_data = [ ClusterData( name=data["name"], host_count=data["host_count"], bundle=bundles[data["bundle_id"]], host_component_map=hostcomponent_data.get(cluster_id, []), + hosts=host_data.get(cluster_id, []), ) for cluster_id, data in cluster_general_info.items() ] - return ADCMEntities( - clusters=clusters_data, - bundles=bundles.values(), - providers=hostproviders_data, + return self._mapper( + ADCMEntities( + clusters=clusters_data, + bundles=bundles.values(), + providers=hostproviders_data, + ) ) diff --git a/python/cm/migrations/0129_auto_20240904_1045.py b/python/cm/migrations/0129_auto_20240904_1045.py new file mode 100644 index 0000000000..37ce5a201a --- /dev/null +++ b/python/cm/migrations/0129_auto_20240904_1045.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Generated by Django 3.2.23 on 2024-09-04 10:45 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("cm", "0128_activate_statistics_collection"), + ] + + operations = [ + migrations.CreateModel( + name="HostInfo", + fields=[ + ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ("value", models.JSONField()), + ("hash", models.CharField(max_length=255)), + ("date", models.DateTimeField(auto_now=True)), + ("host", models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to="cm.host")), + ], + ), + migrations.AddIndex( + model_name="hostinfo", + index=models.Index(fields=["host"], name="cm_hostinfo_host_id_65bc27_idx"), + ), + migrations.AlterUniqueTogether( + name="hostinfo", + unique_together={("host",)}, + ), + ] diff --git a/python/cm/models.py b/python/cm/models.py index c6bf298072..29fb609240 100644 --- a/python/cm/models.py +++ b/python/cm/models.py @@ -1695,3 +1695,16 @@ def get_model_by_type(object_type): # This function should return a Model, this is necessary for the correct # construction of the schema. 
return Cluster + + +class HostInfo(models.Model): + host = models.OneToOneField(Host, on_delete=models.CASCADE, null=False) + value = models.JSONField() + hash = models.CharField(max_length=255) + date = models.DateTimeField(auto_now=True) + + class Meta: + unique_together = ("host",) + indexes = [ + models.Index(fields=["host"]), + ] diff --git a/python/cm/tests/test_management_commands.py b/python/cm/tests/test_management_commands.py index 5a1e5bf327..e169203140 100644 --- a/python/cm/tests/test_management_commands.py +++ b/python/cm/tests/test_management_commands.py @@ -31,13 +31,13 @@ from requests.exceptions import ConnectionError from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_405_METHOD_NOT_ALLOWED -from cm.collect_statistics.collectors import BundleCollector +from cm.collect_statistics.collectors import BundleCollector, map_community_bundle_data from cm.collect_statistics.encoders import TarFileEncoder from cm.collect_statistics.errors import RetriesExceededError, SenderConnectionError from cm.collect_statistics.gather_hardware_info import get_inventory from cm.collect_statistics.senders import SenderSettings, StatisticSender from cm.collect_statistics.storages import JSONFile, StorageError, TarFileWithJSONFileStorage -from cm.models import ADCM, Bundle, ServiceComponent +from cm.models import ADCM, Bundle, Host, HostInfo, ServiceComponent from cm.services.job.inventory import get_objects_configurations from cm.tests.utils import gen_cluster, gen_provider @@ -165,7 +165,6 @@ def test_collect_community_bundle_collector(self) -> None: host_1 = self.add_host(provider=provider_full_1, fqdn="host-1", cluster=cluster_reg_1) host_2 = self.add_host(provider=provider_full_1, fqdn="host-2", cluster=cluster_reg_2) - self.add_host(provider=provider_reg_1, fqdn="host-3", cluster=cluster_reg_1) self.add_services_to_cluster(["service_one_component"], cluster=cluster_reg_1) service_2 = self.add_services_to_cluster(["service_two_components"], 
cluster=cluster_reg_1).get() @@ -184,6 +183,8 @@ def test_collect_community_bundle_collector(self) -> None: current_year = str(timezone.now().year) host_1_name_hash = md5(host_1.fqdn.encode("utf-8")).hexdigest() # noqa: S324 host_2_name_hash = md5(host_2.fqdn.encode("utf-8")).hexdigest() # noqa: S324 + self.add_host(provider=provider_reg_1, fqdn="host-3", cluster=cluster_reg_1) + expected_bundles = [ {"name": bundle.name, "version": bundle.version, "edition": "community", "date": current_year} for bundle in ( @@ -211,6 +212,7 @@ def test_collect_community_bundle_collector(self) -> None: "host_count": 0, "bundle": expected_bundles[1], "host_component_map": [], + "hosts": [], }, { "name": cluster_reg_1.name, @@ -231,6 +233,7 @@ def test_collect_community_bundle_collector(self) -> None: ], key=order_hc_by, ), + "hosts": [], }, { "name": cluster_reg_2.name, @@ -243,6 +246,7 @@ def test_collect_community_bundle_collector(self) -> None: "service_name": service_3.name, }, ], + "hosts": [], }, ], key=by_name, @@ -329,6 +333,97 @@ def test_inventory(self): self.assertDictEqual(actual_inventory, expected_inventory) + def test_host_info_dump_mapping(self): + bundle_cluster_reg = self.add_bundle(self.bundles_dir / "cluster_1") + + bundle_prov_reg = self.add_bundle(self.bundles_dir / "provider") + bundle_prov_full = self.add_bundle(self.bundles_dir / "provider_full_config") + + cluster_reg_1 = self.add_cluster(bundle=bundle_cluster_reg, name="Regular 1") + cluster_reg_2 = self.add_cluster(bundle=bundle_cluster_reg, name="Regular 2") + + provider_full_1 = self.add_provider(bundle=bundle_prov_full, name="Prov Full 1") + provider_reg_1 = self.add_provider(bundle=bundle_prov_reg, name="Prov Reg 1") + + self.add_host(provider=provider_full_1, fqdn="host-1", cluster=cluster_reg_1) + self.add_host(provider=provider_full_1, fqdn="host-2", cluster=cluster_reg_2) + self.add_host(provider=provider_reg_1, fqdn="host-3", cluster=cluster_reg_1) + + host_values = [ + { + "cpu_vcores": 8, + 
"os": {"family": "RedHat"}, + "ram": 12457, + "devices": [{"name": "vda", "removable": 0, "size": "20.00 GB"}], + }, + { + "cpu_vcores": 8, + "devices": [ + { + "name": "vda", + "removable": "0", + "rotational": "0", + "size": "20.00 GB", + "description": "Virtual I/O device", + } + ], + "os": {"distribution": "CentOS", "family": "RedHat", "version": "7.9"}, + "ram": 15884, + }, + { + "cpu_vcores": 6, + "os": {"distribution": "CentOS", "version": "7.9"}, + "ram": 12457, + "devices": [{"name": "vda", "removable": 0, "size": "20.00 GB"}], + }, + ] + + for cluster in [cluster_reg_1, cluster_reg_2]: + host = Host.objects.filter(cluster__name=cluster.name) + for host_object in host: + host_hash = md5(host_object.fqdn.encode(encoding="utf-8")).hexdigest() # noqa: S324 + HostInfo.objects.create(host=host_object, value=host_values.pop(), hash=host_hash) + self.assertEqual(HostInfo.objects.count(), 3) + + with self.subTest("test community edition"): + collect = BundleCollector( + date_format="%Y", filters=[Q(edition="community")], mapper=map_community_bundle_data + ) + actual = collect().model_dump() + + for cluster in actual["clusters"]: + for host in cluster["hosts"]: + if host["name"] == "host-1": + self.assertEqual(host["info"]["os"], {}) + else: + self.assertEqual(host["info"]["os"], {"family": "RedHat"}) + + with self.subTest("test enterprise edition"): + for bundle in Bundle.objects.all(): + bundle.edition = "enterprise" + bundle.save() + collect = BundleCollector(date_format="%Y", filters=[Q(edition="enterprise")]) + actual = collect().model_dump() + + for cluster in actual["clusters"]: + for host in cluster["hosts"]: + if host["name"] == "host-1": + self.assertEqual(host["info"]["os"], {"distribution": "CentOS", "version": "7.9"}) + elif host["name"] == "host-3": + self.assertEqual( + host["info"]["os"], {"distribution": "CentOS", "family": "RedHat", "version": "7.9"} + ) + else: + self.assertEqual(host["info"]["os"], {"family": "RedHat"}) + + with 
self.subTest("test mapper and filter mismatch"): + collect = BundleCollector(date_format="%Y", filters=[Q(edition="community")]) + actual = collect().model_dump() + + self.assertListEqual(actual["bundles"], []) + self.assertListEqual(actual["clusters"], []) + self.assertListEqual(actual["providers"], []) + class TestStorage(BaseAPITestCase): def setUp(self) -> None: @@ -352,6 +447,7 @@ def setUp(self) -> None: host_2 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_2") host_3 = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_3") host_unmapped = self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_unmapped") + self.add_host(bundle=self.provider_bundle, provider=self.provider, fqdn="test_host_not_in_cluster") for host in (host_1, host_2, host_3, host_unmapped): @@ -438,6 +534,7 @@ def _get_expected_data() -> dict: "service_name": "service_1", }, ], + "hosts": [], }, { "name": "cluster_2", From d59963671eab68a3310d2c00694b329cb537c661 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Fri, 6 Sep 2024 14:48:48 +0300 Subject: [PATCH 60/98] ADCM-5680 Fix imports --- python/api_v2/imports/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/api_v2/imports/views.py b/python/api_v2/imports/views.py index 4ba4470307..f40013d6f2 100644 --- a/python/api_v2/imports/views.py +++ b/python/api_v2/imports/views.py @@ -20,8 +20,8 @@ get_object_for_user, ) from api_v2.api_schema import DefaultParams, ErrorSerializer -from api_v2.imports.serializers import ImportPostSerializer, ImportSerializer -from api_v2.imports.utils import cook_data_for_multibind, get_imports +from api_v2.generic.imports.serializers import ImportPostSerializer, ImportSerializer +from api_v2.generic.imports.utils import cook_data_for_multibind, get_imports from api_v2.views import ADCMGenericViewSet from audit.utils import audit from cm.api import multi_bind From 
62649969bb5a23ae717a187ce9fd46094c9e1afe Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Fri, 6 Sep 2024 16:25:18 +0300 Subject: [PATCH 61/98] ADCM-5937: fix checking service requires on mapping save --- python/api_v2/cluster/data_containers.py | 16 +--------------- python/api_v2/cluster/utils.py | 5 ++++- .../bundles/hc_mapping_constraints/config.yaml | 3 --- python/api_v2/tests/test_mapping.py | 14 ++++---------- 4 files changed, 9 insertions(+), 29 deletions(-) diff --git a/python/api_v2/cluster/data_containers.py b/python/api_v2/cluster/data_containers.py index 0822ef3196..ba31683292 100644 --- a/python/api_v2/cluster/data_containers.py +++ b/python/api_v2/cluster/data_containers.py @@ -43,6 +43,7 @@ class MappingData: existing_mapping: list[HostComponentData] orm_objects: dict[Literal["hosts", "cluster", "providers"], dict[int, Any] | Any] not_found_object_ids: dict[Literal["hosts", "components"], set] + existing_services_names: list[str] @cached_property def mapping_difference(self) -> dict[Literal["add", "remove", "remain"], list[MappingEntryData]]: @@ -70,21 +71,6 @@ def mapping_difference(self) -> dict[Literal["add", "remove", "remain"], list[Ma ], } - @cached_property - def mapping_names(self) -> dict[Literal["services", "components"], set[str]]: - return { - "services": { - self.prototypes[map_.service.prototype_id].name - for map_ in self.mapping - if self.prototypes[map_.service.prototype_id].type == "service" - }, - "components": { - self.prototypes[map_.service.prototype_id].name - for map_ in self.mapping - if self.prototypes[map_.service.prototype_id].type == "component" - }, - } - @cached_property def mapping_prototypes(self) -> list[dict[Literal["service", "component"], PrototypeData]]: return [ diff --git a/python/api_v2/cluster/utils.py b/python/api_v2/cluster/utils.py index f36a94aeaa..24aac2ccc3 100644 --- a/python/api_v2/cluster/utils.py +++ b/python/api_v2/cluster/utils.py @@ -138,6 +138,7 @@ def retrieve_mapping_data( 
"existing_mapping": [], "orm_objects": {"cluster": cluster, "hosts": {}, "providers": {}}, "not_found_object_ids": {}, + "existing_services_names": [], } for service in ( @@ -146,6 +147,8 @@ def retrieve_mapping_data( .prefetch_related("servicecomponent_set", "servicecomponent_set__prototype") ): service: ClusterObject + + mapping_data["existing_services_names"].append(service.prototype.name) mapping_data["services"][service.pk] = ServiceData.model_validate(obj=service) mapping_data["prototypes"][service.prototype.pk] = PrototypeData.model_validate(obj=service.prototype) for component in service.servicecomponent_set.all(): @@ -333,7 +336,7 @@ def _check_single_mapping_requires(mapping_entry: MappingEntryData, mapping_data ]: require: RequiresData - if require.service not in mapping_data.mapping_names["services"]: + if require.service not in mapping_data.existing_services_names: if source_type == ObjectType.COMPONENT.value: reference = f'component "{component_prototype.name}" of service "{service_prototype.name}"' else: diff --git a/python/api_v2/tests/bundles/hc_mapping_constraints/config.yaml b/python/api_v2/tests/bundles/hc_mapping_constraints/config.yaml index 3fe3161d8c..4a6eeaede6 100644 --- a/python/api_v2/tests/bundles/hc_mapping_constraints/config.yaml +++ b/python/api_v2/tests/bundles/hc_mapping_constraints/config.yaml @@ -32,9 +32,6 @@ - name: service_required type: service version: *version - components: - component_in_required_service: - constraint: [ 0, + ] - name: service_with_component_required type: service diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index 9d0fee0022..6299b1237b 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -403,21 +403,15 @@ def test_required_service_success(self): component_1 = ServiceComponent.objects.get( prototype__name="component_1", service=service_requires_service, cluster=self.cluster ) - - service_required = 
self.add_services_to_cluster(service_names=["service_required"], cluster=self.cluster).get() - component_in_required_service = ServiceComponent.objects.get( - prototype__name="component_in_required_service", service=service_required, cluster=self.cluster - ) + # required service must be added (not exactly mapped) on mapping save + self.add_services_to_cluster(service_names=["service_required"], cluster=self.cluster).get() response: Response = self.client.v2[self.cluster, "mapping"].post( - data=[ - {"hostId": self.host_1.pk, "componentId": component_1.pk}, - {"hostId": self.host_1.pk, "componentId": component_in_required_service.pk}, - ], + data=[{"hostId": self.host_1.pk, "componentId": component_1.pk}], ) self.assertEqual(response.status_code, HTTP_201_CREATED) - self.assertEqual(HostComponent.objects.count(), 2) + self.assertEqual(HostComponent.objects.count(), 1) def test_no_required_component_fail(self): service_requires_component = self.add_services_to_cluster( From 5433b44df2878075690ecfe48807f6fe833dcccf Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 9 Sep 2024 07:39:54 +0000 Subject: [PATCH 62/98] ADCM-5926 Fix command/playbook errors --- .../ansible/action_plugins/adcm_add_host_info.py | 6 ++++-- .../ansible/collect_host_info.yaml | 4 +++- python/cm/collect_statistics/collectors.py | 13 +++++++------ python/cm/management/commands/collect_statistics.py | 6 ++++-- python/cm/management/commands/gather_host_facts.py | 2 +- python/cm/tests/test_management_commands.py | 2 +- 6 files changed, 20 insertions(+), 13 deletions(-) diff --git a/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py b/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py index b4d6c51bd9..91c676f4b2 100644 --- a/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py +++ b/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py @@ -20,7 +20,6 @@ import traceback from ansible.plugins.action import 
ActionBase -from core.types import HostID sys.path.append("/adcm/python") @@ -28,6 +27,7 @@ from cm.collect_statistics.types import HostDeviceFacts, HostFacts, HostOSFacts from cm.models import HostInfo +from core.types import HostID # To parse output of lshw command that'll be like: # @@ -94,7 +94,9 @@ def prepare_hosts_facts_for_storage(self, task_vars: dict) -> dict[HostID, DataT continue try: - disk_descriptions = _extract_disk_info(hostvars[host_name]["disk_command_out"]) + # if data extraction failed, there won't be such key + disk_command_out = hostvars[host_name].get("disk_command_out", "") + disk_descriptions = _extract_disk_info(disk_command_out) if disk_command_out else {} host_id = hostvars[host_name]["adcm_hostid"] diff --git a/python/cm/collect_statistics/ansible/collect_host_info.yaml b/python/cm/collect_statistics/ansible/collect_host_info.yaml index 265a7fc9e7..18144d0074 100644 --- a/python/cm/collect_statistics/ansible/collect_host_info.yaml +++ b/python/cm/collect_statistics/ansible/collect_host_info.yaml @@ -8,8 +8,10 @@ - shell: cmd: "lshw -short -C disk" register: lshw_result + ignore_errors: yes - set_fact: - disk_command_out: lshw_result.stdout + disk_command_out: "{{ lshw_result.stdout }}" + when: "{{ lshw_result }}" - name: Save hosts info to DB adcm_add_host_info: run_once: true diff --git a/python/cm/collect_statistics/collectors.py b/python/cm/collect_statistics/collectors.py index 0a75072863..b502a2e3bb 100644 --- a/python/cm/collect_statistics/collectors.py +++ b/python/cm/collect_statistics/collectors.py @@ -20,7 +20,7 @@ from rbac.models import Policy, Role, User from typing_extensions import TypedDict -from cm.collect_statistics.types import HostDeviceFacts, HostFacts, HostOSFacts +from cm.collect_statistics.types import HostDeviceFacts, HostFacts from cm.models import Bundle, Cluster, HostComponent, HostInfo, HostProvider @@ -81,7 +81,8 @@ def _get_hosts_by_edition(data: ADCMEntities, edition: Literal["community", "ent def 
map_community_bundle_data(data: ADCMEntities) -> ADCMEntities: community_hosts = _get_hosts_by_edition(data=data, edition="community") for host in community_hosts: - host["info"]["os"] = HostOSFacts(**{k: v for k, v in host["info"]["os"].items() if k == "family"}) + family = host["info"]["os"].get("family", "") + host["info"]["os"] = {"family": family} if family else {} return data @@ -106,17 +107,17 @@ def __call__(self) -> RBACEntities: class BundleCollector: - __slots__ = ("_date_format", "_filters", "_mapper") + __slots__ = ("_date_format", "_filters", "_postprocess_result") def __init__( self, date_format: str, filters: Collection[Q] = (), - mapper: Callable[[ADCMEntities], ADCMEntities] = lambda x: x, + postprocess_result: Callable[[ADCMEntities], ADCMEntities] = lambda x: x, ): self._date_format = date_format self._filters = filters - self._mapper = mapper + self._postprocess_result = postprocess_result def __call__(self) -> ADCMEntities: bundles: dict[int, BundleData] = { @@ -177,7 +178,7 @@ def __call__(self) -> ADCMEntities: for cluster_id, data in cluster_general_info.items() ] - return self._mapper( + return self._postprocess_result( ADCMEntities( clusters=clusters_data, bundles=bundles.values(), diff --git a/python/cm/management/commands/collect_statistics.py b/python/cm/management/commands/collect_statistics.py index 01b354d0ed..e7202b1381 100644 --- a/python/cm/management/commands/collect_statistics.py +++ b/python/cm/management/commands/collect_statistics.py @@ -23,7 +23,7 @@ from django.utils import timezone from cm.adcm_config.config import get_adcm_config -from cm.collect_statistics.collectors import ADCMEntities, BundleCollector, RBACCollector +from cm.collect_statistics.collectors import ADCMEntities, BundleCollector, RBACCollector, map_community_bundle_data from cm.collect_statistics.encoders import TarFileEncoder from cm.collect_statistics.senders import SenderSettings, StatisticSender from cm.collect_statistics.storages import JSONFile, 
TarFileWithJSONFileStorage @@ -37,7 +37,9 @@ logger = getLogger("background_tasks") -collect_not_enterprise = BundleCollector(date_format=DATE_TIME_FORMAT, filters=[~Q(edition="enterprise")]) +collect_not_enterprise = BundleCollector( + date_format=DATE_TIME_FORMAT, filters=[~Q(edition="enterprise")], postprocess_result=map_community_bundle_data +) collect_all = BundleCollector(date_format=DATE_TIME_FORMAT, filters=[]) diff --git a/python/cm/management/commands/gather_host_facts.py b/python/cm/management/commands/gather_host_facts.py index 3662466b58..4a5ee6efb6 100644 --- a/python/cm/management/commands/gather_host_facts.py +++ b/python/cm/management/commands/gather_host_facts.py @@ -28,7 +28,7 @@ class Command(BaseCommand): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._inventory_dir = settings.DATA_DIR / "tmp" / "gather_host_facts" - self._workdir = settings.CODE_DIR / "collect_statistics" / "ansible" + self._workdir = settings.CODE_DIR / "cm" / "collect_statistics" / "ansible" def handle(self, *_, **__) -> None: self._inventory_dir.mkdir(exist_ok=True, parents=True) diff --git a/python/cm/tests/test_management_commands.py b/python/cm/tests/test_management_commands.py index e169203140..7a6a9abba4 100644 --- a/python/cm/tests/test_management_commands.py +++ b/python/cm/tests/test_management_commands.py @@ -387,7 +387,7 @@ def test_host_info_dump_mapping(self): with self.subTest("test community edition"): collect = BundleCollector( - date_format="%Y", filters=[Q(edition="community")], mapper=map_community_bundle_data + date_format="%Y", filters=[Q(edition="community")], postprocess_result=map_community_bundle_data ) actual = collect().model_dump() From 1981f3e6dc71d922b8f6ec053919a2c452290be1 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 10 Sep 2024 07:29:42 +0000 Subject: [PATCH 63/98] ADCM-5693 Change host data collection for statistics --- python/cm/collect_statistics/collectors.py | 65 +++----- 
.../management/commands/collect_statistics.py | 6 +- python/cm/tests/test_management_commands.py | 153 +++++++++--------- 3 files changed, 101 insertions(+), 123 deletions(-) diff --git a/python/cm/collect_statistics/collectors.py b/python/cm/collect_statistics/collectors.py index b502a2e3bb..20b575a04d 100644 --- a/python/cm/collect_statistics/collectors.py +++ b/python/cm/collect_statistics/collectors.py @@ -12,15 +12,14 @@ from collections import defaultdict from hashlib import md5 -from itertools import chain -from typing import Callable, Collection, Literal +from typing import Collection, Literal +from core.types import BundleID, ClusterID from django.db.models import Count, F, Q from pydantic import BaseModel from rbac.models import Policy, Role, User from typing_extensions import TypedDict -from cm.collect_statistics.types import HostDeviceFacts, HostFacts from cm.models import Bundle, Cluster, HostComponent, HostInfo, HostProvider @@ -72,21 +71,6 @@ class RBACEntities(BaseModel): roles: list[RoleData] -def _get_hosts_by_edition(data: ADCMEntities, edition: Literal["community", "enterprise"]) -> list[dict]: - return list( - chain.from_iterable(cluster["hosts"] for cluster in data.clusters if cluster["bundle"]["edition"] == edition) - ) - - -def map_community_bundle_data(data: ADCMEntities) -> ADCMEntities: - community_hosts = _get_hosts_by_edition(data=data, edition="community") - for host in community_hosts: - family = host["info"]["os"].get("family", "") - host["info"]["os"] = {"family": family} if family else {} - - return data - - class RBACCollector: def __init__(self, date_format: str): self._date_format = date_format @@ -106,21 +90,19 @@ def __call__(self) -> RBACEntities: ) +def get_host_name_hash(name: str) -> str: + return md5(name.encode(encoding="utf-8")).hexdigest() # noqa: S324 + + class BundleCollector: __slots__ = ("_date_format", "_filters", "_postprocess_result") - def __init__( - self, - date_format: str, - filters: Collection[Q] = (), - 
postprocess_result: Callable[[ADCMEntities], ADCMEntities] = lambda x: x, - ): + def __init__(self, date_format: str, filters: Collection[Q] = ()): self._date_format = date_format self._filters = filters - self._postprocess_result = postprocess_result def __call__(self) -> ADCMEntities: - bundles: dict[int, BundleData] = { + bundles: dict[BundleID, BundleData] = { entry.pop("id"): BundleData(date=entry.pop("date").strftime(self._date_format), **entry) for entry in Bundle.objects.filter(*self._filters).values("id", *BundleData.__annotations__.keys()) } @@ -132,7 +114,7 @@ def __call__(self) -> ADCMEntities: .annotate(host_count=Count("host")) ] - cluster_general_info: dict[int, dict[Literal["name", "bundle_id", "host_count"], int | str]] = { + cluster_general_info: dict[ClusterID, dict[Literal["name", "bundle_id", "host_count"], int | str]] = { entry.pop("id"): entry for entry in Cluster.objects.filter(prototype__bundle_id__in=bundles.keys()) .values("id", "name", bundle_id=F("prototype__bundle_id")) @@ -148,24 +130,21 @@ def __call__(self) -> ADCMEntities: ): hostcomponent_data[entry.pop("cluster_id")].append( HostComponentData( - host_name=md5(entry.pop("host_name").encode(encoding="utf-8")).hexdigest(), # noqa: S324 + host_name=get_host_name_hash(entry.pop("host_name")), **entry, ) ) host_data = defaultdict(list) - for host_info in HostInfo.objects.select_related("host").filter( - host__cluster_id__in=cluster_general_info.keys() - ): - device_facts = [HostDeviceFacts(**device) for device in host_info.value["devices"]] - host_facts = HostFacts( - cpu_vcores=host_info.value["cpu_vcores"], - os=host_info.value["os"], - ram=host_info.value["ram"], - devices=device_facts, - ) + for host_name, host_cluster_id, host_facts in HostInfo.objects.values_list( + "host__fqdn", "host__cluster_id", "value" + ).filter(host__cluster_id__in=cluster_general_info.keys()): + related_bundle_edition = bundles[cluster_general_info[host_cluster_id]["bundle_id"]]["edition"] + if 
related_bundle_edition != "enterprise": + # we gather only os family if edition isn't enterprise + host_facts["os"] = {"family": family} if (family := host_facts["os"].get("family")) else {} - host_data[host_info.host.cluster_id].append({"name": host_info.host.fqdn, "info": host_facts}) + host_data[host_cluster_id].append({"name": get_host_name_hash(host_name), "info": host_facts}) clusters_data = [ ClusterData( @@ -178,10 +157,4 @@ def __call__(self) -> ADCMEntities: for cluster_id, data in cluster_general_info.items() ] - return self._postprocess_result( - ADCMEntities( - clusters=clusters_data, - bundles=bundles.values(), - providers=hostproviders_data, - ) - ) + return ADCMEntities(clusters=clusters_data, bundles=bundles.values(), providers=hostproviders_data) diff --git a/python/cm/management/commands/collect_statistics.py b/python/cm/management/commands/collect_statistics.py index e7202b1381..01b354d0ed 100644 --- a/python/cm/management/commands/collect_statistics.py +++ b/python/cm/management/commands/collect_statistics.py @@ -23,7 +23,7 @@ from django.utils import timezone from cm.adcm_config.config import get_adcm_config -from cm.collect_statistics.collectors import ADCMEntities, BundleCollector, RBACCollector, map_community_bundle_data +from cm.collect_statistics.collectors import ADCMEntities, BundleCollector, RBACCollector from cm.collect_statistics.encoders import TarFileEncoder from cm.collect_statistics.senders import SenderSettings, StatisticSender from cm.collect_statistics.storages import JSONFile, TarFileWithJSONFileStorage @@ -37,9 +37,7 @@ logger = getLogger("background_tasks") -collect_not_enterprise = BundleCollector( - date_format=DATE_TIME_FORMAT, filters=[~Q(edition="enterprise")], postprocess_result=map_community_bundle_data -) +collect_not_enterprise = BundleCollector(date_format=DATE_TIME_FORMAT, filters=[~Q(edition="enterprise")]) collect_all = BundleCollector(date_format=DATE_TIME_FORMAT, filters=[]) diff --git
a/python/cm/tests/test_management_commands.py b/python/cm/tests/test_management_commands.py index 7a6a9abba4..c9cf911d22 100644 --- a/python/cm/tests/test_management_commands.py +++ b/python/cm/tests/test_management_commands.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from copy import deepcopy from hashlib import md5 from operator import itemgetter from pathlib import Path @@ -31,13 +32,13 @@ from requests.exceptions import ConnectionError from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_405_METHOD_NOT_ALLOWED -from cm.collect_statistics.collectors import BundleCollector, map_community_bundle_data +from cm.collect_statistics.collectors import BundleCollector, get_host_name_hash from cm.collect_statistics.encoders import TarFileEncoder from cm.collect_statistics.errors import RetriesExceededError, SenderConnectionError from cm.collect_statistics.gather_hardware_info import get_inventory from cm.collect_statistics.senders import SenderSettings, StatisticSender from cm.collect_statistics.storages import JSONFile, StorageError, TarFileWithJSONFileStorage -from cm.models import ADCM, Bundle, Host, HostInfo, ServiceComponent +from cm.models import ADCM, Bundle, HostInfo, ServiceComponent from cm.services.job.inventory import get_objects_configurations from cm.tests.utils import gen_cluster, gen_provider @@ -335,94 +336,100 @@ def test_inventory(self): def test_host_info_dump_mapping(self): bundle_cluster_reg = self.add_bundle(self.bundles_dir / "cluster_1") + bundle_cluster_enterprise = self.add_bundle(self.bundles_dir / "cluster_full_config") + bundle_cluster_enterprise.edition = "enterprise" + bundle_cluster_enterprise.save(update_fields=["edition"]) bundle_prov_reg = self.add_bundle(self.bundles_dir / "provider") bundle_prov_full = self.add_bundle(self.bundles_dir / "provider_full_config") cluster_reg_1 = self.add_cluster(bundle=bundle_cluster_reg, name="Regular 1") - 
cluster_reg_2 = self.add_cluster(bundle=bundle_cluster_reg, name="Regular 2") + enterprise_cluster = self.add_cluster(bundle=bundle_cluster_enterprise, name="Regular 2 ee") provider_full_1 = self.add_provider(bundle=bundle_prov_full, name="Prov Full 1") provider_reg_1 = self.add_provider(bundle=bundle_prov_reg, name="Prov Reg 1") - self.add_host(provider=provider_full_1, fqdn="host-1", cluster=cluster_reg_1) - self.add_host(provider=provider_full_1, fqdn="host-2", cluster=cluster_reg_2) - self.add_host(provider=provider_reg_1, fqdn="host-3", cluster=cluster_reg_1) - - host_values = [ - { - "cpu_vcores": 8, - "os": {"family": "RedHat"}, - "ram": 12457, - "devices": [{"name": "vda", "removable": 0, "size": "20.00 GB"}], + host_1 = self.add_host(provider=provider_full_1, fqdn="host-1", cluster=cluster_reg_1) + host_2 = self.add_host(provider=provider_full_1, fqdn="host-2", cluster=enterprise_cluster) + host_3 = self.add_host(provider=provider_reg_1, fqdn="host-3", cluster=cluster_reg_1) + + host_info = { + host_1.id: { + "name": get_host_name_hash(host_1.fqdn), + "info": { + "cpu_vcores": 8, + "os": {"family": "RedHat"}, + "ram": 12457, + "devices": [{"name": "vda", "removable": 0, "size": "20.00 GB"}], + }, }, - { - "cpu_vcores": 8, - "devices": [ - { - "name": "vda", - "removable": "0", - "rotational": "0", - "size": "20.00 GB", - "description": "Virtual I/O device", - } - ], - "os": {"distribution": "CentOS", "family": "RedHat", "version": "7.9"}, - "ram": 15884, + host_2.id: { + "name": get_host_name_hash(host_2.fqdn), + "info": { + "cpu_vcores": 8, + "devices": [ + { + "name": "vda", + "removable": "0", + "rotational": "0", + "size": "20.00 GB", + "description": "Virtual I/O device", + } + ], + "os": {"distribution": "CentOS", "family": "RedHat", "version": "7.9"}, + "ram": 15884, + }, }, - { - "cpu_vcores": 6, - "os": {"distribution": "CentOS", "version": "7.9"}, - "ram": 12457, - "devices": [{"name": "vda", "removable": 0, "size": "20.00 GB"}], + host_3.id: { + 
"name": get_host_name_hash(host_3.fqdn), + "info": { + "cpu_vcores": 6, + "os": {"distribution": "CentOS", "version": "7.9"}, + "ram": 12457, + "devices": [{"name": "vda", "removable": 0, "size": "20.00 GB"}], + }, }, - ] + } + + expected_values = deepcopy(host_info) + # because it's not enterprise and there's no family + expected_values[host_3.id]["info"]["os"] = {} + + for host in (host_1, host_2, host_3): + HostInfo.objects.create(host=host, value=host_info[host.id]["info"], hash="") - for cluster in [cluster_reg_1, cluster_reg_2]: - host = Host.objects.filter(cluster__name=cluster.name) - for host_object in host: - host_hash = md5(host_object.fqdn.encode(encoding="utf-8")).hexdigest() # noqa: S324 - HostInfo.objects.create(host=host_object, value=host_values.pop(), hash=host_hash) self.assertEqual(HostInfo.objects.count(), 3) - with self.subTest("test community edition"): - collect = BundleCollector( - date_format="%Y", filters=[Q(edition="community")], postprocess_result=map_community_bundle_data - ) - actual = collect().model_dump() - - for cluster in actual["clusters"]: - for host in cluster["hosts"]: - if host["name"] == "host-1": - self.assertEqual(host["info"]["os"], {}) - else: - self.assertEqual(host["info"]["os"], {"family": "RedHat"}) - - with self.subTest("test enterprise edition"): - for bundle in Bundle.objects.all(): - bundle.edition = "enterprise" - bundle.save() - collect = BundleCollector(date_format="%Y", filters=[Q(edition="enterprise")]) - actual = collect().model_dump() - - for cluster in actual["clusters"]: - for host in cluster["hosts"]: - if host["name"] == "host-1": - self.assertEqual(host["info"]["os"], {"distribution": "CentOS", "version": "7.9"}) - elif host["name"] == "host-3": - self.assertEqual( - host["info"]["os"], {"distribution": "CentOS", "family": "RedHat", "version": "7.9"} - ) - else: - self.assertEqual(host["info"]["os"], {"family": "RedHat"}) - - with self.subTest("test mapper and filter mismatch"): + with 
self.subTest("community"): collect = BundleCollector(date_format="%Y", filters=[Q(edition="community")]) - actual = collect().model_dump() + result = collect().model_dump() + + actual_hosts = { + cluster["name"]: sorted(cluster["hosts"], key=itemgetter("name")) for cluster in result["clusters"] + } + expected_hosts = { + cluster_reg_1.name: sorted( + (expected_values[host_1.id], expected_values[host_3.id]), key=itemgetter("name") + ) + } + + self.assertDictEqual(actual_hosts, expected_hosts) + + with self.subTest("all"): + collect = BundleCollector(date_format="%Y", filters=[]) + result = collect().model_dump() + + actual_hosts = { + cluster["name"]: sorted(cluster["hosts"], key=itemgetter("name")) for cluster in result["clusters"] + } + expected_hosts = { + enterprise_cluster.name: [expected_values[host_2.id]], + cluster_reg_1.name: sorted( + (expected_values[host_1.id], expected_values[host_3.id]), key=itemgetter("name") + ), + } - self.assertListEqual(actual["bundles"], []) - self.assertListEqual(actual["clusters"], []) - self.assertListEqual(actual["providers"], []) + self.assertDictEqual(actual_hosts, expected_hosts) class TestStorage(BaseAPITestCase): From d36573b41f4a8edd0ed37d44320c95e6b3037f1a Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Tue, 10 Sep 2024 12:48:53 +0500 Subject: [PATCH 64/98] ADCM-5693 Save `removable` and `rotational` as bool to `HostInfo` --- .../ansible/action_plugins/adcm_add_host_info.py | 11 +++++++++-- python/cm/collect_statistics/types.py | 4 ++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py b/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py index 91c676f4b2..08623c06e6 100644 --- a/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py +++ b/python/cm/collect_statistics/ansible/action_plugins/adcm_add_host_info.py @@ -65,6 +65,13 @@ def _extract_disk_info(lshw_out: str) -> dict[str, str]: } 
+def _safe_str_to_bool(s: str) -> bool: + try: + return bool(int(s)) + except ValueError: + return False + + class DataToStore(NamedTuple): facts: HostFacts hash_value: str @@ -111,8 +118,8 @@ def prepare_hosts_facts_for_storage(self, task_vars: dict) -> dict[HostID, DataT devices=[ HostDeviceFacts( name=device_name, - removable=device["removable"], - rotational=device["rotational"], + removable=_safe_str_to_bool(device["removable"]), + rotational=_safe_str_to_bool(device["rotational"]), size=device["size"], description=disk_descriptions.get(device_name, ""), ) diff --git a/python/cm/collect_statistics/types.py b/python/cm/collect_statistics/types.py index a31d0a424f..58655c54e9 100644 --- a/python/cm/collect_statistics/types.py +++ b/python/cm/collect_statistics/types.py @@ -53,8 +53,8 @@ def decode(self, data: T) -> T: class HostDeviceFacts(TypedDict): name: str - removable: str - rotational: str + removable: bool + rotational: bool size: str description: str From 2d1bfd6ac5d10262660cd232766a77d4b4dbf2fa Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Wed, 11 Sep 2024 09:41:38 +0500 Subject: [PATCH 65/98] ADCM-5693 Bump version of data format --- python/cm/management/commands/collect_statistics.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/cm/management/commands/collect_statistics.py b/python/cm/management/commands/collect_statistics.py index 01b354d0ed..160cb51af7 100644 --- a/python/cm/management/commands/collect_statistics.py +++ b/python/cm/management/commands/collect_statistics.py @@ -107,7 +107,7 @@ def handle(self, *_, mode: str, **__): "version": settings.ADCM_VERSION, "is_internal": is_internal(), }, - "format_version": 0.2, + "format_version": 0.3, } logger.debug(msg="Statistics collector: RBAC data preparation") rbac_entries_data: dict = RBACCollector(date_format=DATE_TIME_FORMAT)().model_dump() From b9cfc3760bb7ab45746f9e910f5fe4b16ee2b9bc Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Wed, 18 Sep 2024 
23:25:14 +0300 Subject: [PATCH 66/98] Bump ADCM dev version --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 209922ab3d..9a34ff5493 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ APP_IMAGE ?= hub.adsw.io/adcm/adcm APP_TAG ?= $(subst /,_,$(BRANCH_NAME)) SELENOID_HOST ?= 10.92.2.65 SELENOID_PORT ?= 4444 -ADCM_VERSION = "2.3.0-dev" +ADCM_VERSION = "2.4.0-dev" PY_FILES = python dev/linters conf/adcm/python_scripts .PHONY: help From 1cd9314ba21ca24235e1eff76d33c634e2608425 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Thu, 19 Sep 2024 07:38:37 +0000 Subject: [PATCH 67/98] ADCM-5967 Fix query builder for remove hosts from config groups --- python/api_v2/tests/test_group_config.py | 36 ++++++++- python/api_v2/tests/test_mapping.py | 97 ++++++++++-------------- python/cm/services/host_group_common.py | 42 +++++----- 3 files changed, 96 insertions(+), 79 deletions(-) diff --git a/python/api_v2/tests/test_group_config.py b/python/api_v2/tests/test_group_config.py index 729e51d152..0cede5d624 100644 --- a/python/api_v2/tests/test_group_config.py +++ b/python/api_v2/tests/test_group_config.py @@ -72,7 +72,6 @@ def setUp(self) -> None: object_type=ContentType.objects.get_for_model(self.service_2), object_id=self.service_2.pk, ) - self.service_1_group_config.hosts.add(self.host) self.host_for_service = self.add_host( bundle=self.provider_bundle, provider=self.provider, fqdn="host_for_service" ) @@ -87,7 +86,10 @@ def setUp(self) -> None: self.component_2 = ServiceComponent.objects.get( cluster=self.cluster_1, service=self.service_1, prototype__name="component_2" ) - self.set_hostcomponent(cluster=self.cluster_1, entries=[(self.host_for_service, self.component_1)]) + self.set_hostcomponent( + cluster=self.cluster_1, entries=[(self.host, self.component_1), (self.host_for_service, self.component_1)] + ) + self.service_1_group_config.hosts.add(self.host) class 
TestGroupConfigNaming(BaseServiceGroupConfigTestCase): @@ -639,13 +641,15 @@ def setUp(self) -> None: object_type=ContentType.objects.get_for_model(self.component_2), object_id=self.component_2.pk, ) - self.component_1_group_config.hosts.add(self.host) self.host_for_component = self.add_host( bundle=self.provider_bundle, provider=self.provider, fqdn="host_for_component" ) self.add_host_to_cluster(cluster=self.cluster_1, host=self.host_for_component) - self.set_hostcomponent(cluster=self.cluster_1, entries=[(self.host_for_component, self.component_1)]) + self.set_hostcomponent( + cluster=self.cluster_1, entries=[(self.host, self.component_1), (self.host_for_component, self.component_1)] + ) + self.component_1_group_config.hosts.add(self.host) def test_list_success(self): response = self.client.v2[self.component_1, CONFIG_GROUPS].get() @@ -856,6 +860,30 @@ def test_permissions_another_object_role_list_denied(self): self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) + def test_adcm_5967_move_host_in_context_of_one_service(self): + self.assertEqual(self.component_1_group_config.hosts.count(), 1) + self.assertEqual(self.service_1_group_config.hosts.count(), 1) + + self.set_hostcomponent( + cluster=self.cluster_1, entries=[(self.host_for_component, self.component_1), (self.host, self.component_2)] + ) + + self.assertEqual(self.component_1_group_config.hosts.count(), 0) + self.assertEqual(self.service_1_group_config.hosts.count(), 1) + self.assertListEqual(list(self.service_1_group_config.hosts.all()), [self.host]) + + def test_adcm_5967_remove_host_from_service(self): + self.assertEqual(self.component_1_group_config.hosts.count(), 1) + self.assertEqual(self.service_1_group_config.hosts.count(), 1) + + self.set_hostcomponent( + cluster=self.cluster_1, + entries=[(self.host_for_component, self.component_1), (self.host_for_component, self.component_2)], + ) + + self.assertEqual(self.component_1_group_config.hosts.count(), 0) + 
self.assertEqual(self.service_1_group_config.hosts.count(), 0) + class TestHostProviderGroupConfig(BaseAPITestCase): def setUp(self) -> None: diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index 00d328038f..b5c2020bad 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -24,7 +24,6 @@ ServiceComponent, Upgrade, ) -from rest_framework.response import Response from rest_framework.status import ( HTTP_200_OK, HTTP_201_CREATED, @@ -268,7 +267,7 @@ def test_host_not_in_cluster_fail(self): prototype__name="component_1", service=service_no_requires, cluster=self.cluster ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": self.host_not_in_cluster.pk, "componentId": component_1.pk}, @@ -295,7 +294,7 @@ def test_foreign_host_fail(self): prototype__name="component_1", service=service_no_requires, cluster=self.cluster ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": self.foreign_host.pk, "componentId": component_1.pk}, @@ -322,7 +321,7 @@ def test_non_existent_host_fail(self): ) non_existent_host_pk = self.get_non_existent_pk(model=Host) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": non_existent_host_pk, "componentId": component_1.pk}, @@ -351,7 +350,7 @@ def test_non_existent_component_fail(self): ) non_existent_component_pk = self.get_non_existent_pk(model=ServiceComponent) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, 
"componentId": component_1.pk}, {"hostId": self.host_1.pk, "componentId": non_existent_component_pk}, @@ -379,7 +378,7 @@ def test_no_required_service_fail(self): prototype__name="component_1", service=service_requires_service, cluster=self.cluster ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, ], @@ -406,7 +405,7 @@ def test_required_service_success(self): # required service must be added (not exactly mapped) on mapping save self.add_services_to_cluster(service_names=["service_required"], cluster=self.cluster).get() - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[{"hostId": self.host_1.pk, "componentId": component_1.pk}], ) @@ -431,7 +430,7 @@ def test_no_required_component_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": self.host_1.pk, "componentId": not_required_component.pk}, @@ -471,7 +470,7 @@ def test_no_required_component_but_unrequired_component_present_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": self.host_1.pk, "componentId": not_required_component.pk}, @@ -511,7 +510,7 @@ def test_required_component_success(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": self.host_1.pk, "componentId": required_component.pk}, @@ -531,7 +530,7 @@ def test_no_bound_fail(self): 
cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": bound_component.pk}, ], @@ -571,7 +570,7 @@ def test_bound_on_different_host_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": bound_component.pk}, {"hostId": self.host_2.pk, "componentId": bound_target_component.pk}, @@ -613,7 +612,7 @@ def test_bound_success(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": bound_component.pk}, {"hostId": self.host_1.pk, "componentId": bound_target_component.pk}, @@ -633,7 +632,7 @@ def test_one_constraint_zero_in_hc_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post(data=[]) + response = self.client.v2[self.cluster, "mapping"].post(data=[]) self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertDictEqual( @@ -659,7 +658,7 @@ def test_one_constraint_two_in_hc_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component.pk}, {"hostId": self.host_2.pk, "componentId": component.pk}, @@ -690,7 +689,7 @@ def test_one_constraint_success(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[{"hostId": self.host_1.pk, "componentId": component.pk}], ) @@ -707,7 +706,7 @@ def test_zero_one_constraint_two_in_hc_fail(self): cluster=self.cluster, ) - response: Response = 
self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component.pk}, {"hostId": self.host_2.pk, "componentId": component.pk}, @@ -740,7 +739,7 @@ def test_zero_one_constraint_success(self): for data in ([], [{"hostId": self.host_1.pk, "componentId": component.pk}]): with self.subTest(f"[0,1] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -768,7 +767,7 @@ def test_one_two_constraint_fail(self): ], ): with self.subTest(f"[1,2] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertDictEqual( @@ -799,7 +798,7 @@ def test_one_two_constraint_success(self): ], ): with self.subTest(f"[1,2] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -826,7 +825,7 @@ def test_one_odd_first_variant_constraint_fail(self): ], ): with self.subTest(f"[1,odd] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertDictEqual( @@ -858,7 +857,7 @@ def test_one_odd_first_variant_constraint_success(self): ], ): with self.subTest(f"[1,odd] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, 
"mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -884,7 +883,7 @@ def test_one_odd_second_variant_constraint_fail(self): ], ): with self.subTest(f"[odd] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertDictEqual( @@ -916,7 +915,7 @@ def test_one_odd_second_variant_constraint_success(self): ], ): with self.subTest(f"[odd] constraint, data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -931,7 +930,7 @@ def test_zero_odd_constraint_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component.pk}, {"hostId": self.host_2.pk, "componentId": component.pk}, @@ -970,7 +969,7 @@ def test_zero_odd_constraint_success(self): ], ): with self.subTest(f"[0,odd], data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -985,7 +984,7 @@ def test_one_plus_constraint_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post(data=[]) + response = self.client.v2[self.cluster, "mapping"].post(data=[]) self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertDictEqual( 
@@ -1022,7 +1021,7 @@ def test_one_plus_constraint_success(self): ], ): with self.subTest(f"[1,+], data: {data}"): - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -1037,7 +1036,7 @@ def test_plus_constraint_fail(self): cluster=self.cluster, ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component.pk}, {"hostId": self.host_2.pk, "componentId": component.pk}, @@ -1067,7 +1066,7 @@ def test_plus_constraint_success(self): ) data = [{"hostId": host.pk, "componentId": component.pk} for host in self.cluster.host_set.all()] - response: Response = self.client.v2[self.cluster, "mapping"].post(data=data) + response = self.client.v2[self.cluster, "mapping"].post(data=data) self.assertEqual(response.status_code, HTTP_201_CREATED) self.assertEqual(HostComponent.objects.count(), len(data)) @@ -1086,7 +1085,7 @@ def test_no_required_service_not_in_hc_fail(self): prototype__name="component_1", service=service_no_requires, cluster=self.cluster ) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[{"hostId": self.host_1.pk, "componentId": component_1.pk}], ) @@ -1115,7 +1114,7 @@ def test_host_in_mm_fail(self): self.host_1.maintenance_mode = MaintenanceMode.ON self.host_1.save(update_fields=["maintenance_mode"]) - response: Response = self.client.v2[self.cluster, "mapping"].post( + response = self.client.v2[self.cluster, "mapping"].post( data=[ {"hostId": self.host_1.pk, "componentId": component_1.pk}, {"hostId": self.host_2.pk, "componentId": component_1.pk}, @@ -1234,7 +1233,7 @@ def _prepare_config_group_via_api( def 
test_host_removed_from_component_group_config_on_mapping_change(self): mapping_data = [{"hostId": self.host_1.pk, "componentId": self.component_1_from_s1.pk}] - response: Response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) + response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config = self._prepare_config_group_via_api( @@ -1242,7 +1241,7 @@ def test_host_removed_from_component_group_config_on_mapping_change(self): ) mapping_data[0].update({"componentId": self.component_2_from_s1.pk}) - response: Response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) + response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config.refresh_from_db() @@ -1252,7 +1251,7 @@ def test_host_not_removed_from_component_group_config_on_mapping_remain(self): endpoint = self.client.v2[self.cluster_1, "mapping"] mapping_data = [{"hostId": self.host_1.pk, "componentId": self.component_1_from_s1.pk}] - response: Response = endpoint.post(data=mapping_data) + response = endpoint.post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config = self._prepare_config_group_via_api( @@ -1260,34 +1259,16 @@ def test_host_not_removed_from_component_group_config_on_mapping_remain(self): ) mapping_data.append({"hostId": self.host_2.pk, "componentId": self.component_2_from_s1.pk}) - response: Response = endpoint.post(data=mapping_data) + response = endpoint.post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config.refresh_from_db() self.assertSetEqual(set(group_config.hosts.values_list("pk", flat=True)), {self.host_1.pk}) - def test_host_removed_from_service_group_config_on_mapping_change(self): - endpoint = self.client.v2[self.cluster_1] / "mapping" - mapping_data = [{"hostId": self.host_1.pk, "componentId": 
self.component_1_from_s1.pk}] - - response: Response = endpoint.post(data=mapping_data) - self.assertEqual(response.status_code, HTTP_201_CREATED) - - group_config = self._prepare_config_group_via_api( - obj=self.service_1, hosts=[self.host_1], name="service config group" - ) - - mapping_data[0].update({"componentId": self.component_2_from_s1.pk}) - response: Response = endpoint.post(data=mapping_data) - self.assertEqual(response.status_code, HTTP_201_CREATED) - - group_config.refresh_from_db() - self.assertEqual(group_config.hosts.count(), 0) - def test_host_not_removed_from_service_group_config_on_mapping_remain(self): mapping_data = [{"hostId": self.host_1.pk, "componentId": self.component_1_from_s1.pk}] - response: Response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) + response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config = self._prepare_config_group_via_api( @@ -1295,7 +1276,7 @@ def test_host_not_removed_from_service_group_config_on_mapping_remain(self): ) mapping_data.insert(0, {"hostId": self.host_2.pk, "componentId": self.component_2_from_s1.pk}) - response: Response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) + response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config.refresh_from_db() @@ -1304,7 +1285,7 @@ def test_host_not_removed_from_service_group_config_on_mapping_remain(self): def test_host_not_removed_from_cluster_group_config_on_mapping_change(self): mapping_data = [{"hostId": self.host_1.pk, "componentId": self.component_1_from_s1.pk}] - response: Response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) + response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config = self._prepare_config_group_via_api( @@ -1312,7 
+1293,7 @@ def test_host_not_removed_from_cluster_group_config_on_mapping_change(self): ) mapping_data[0].update({"componentId": self.component_2_from_s1.pk}) - response: Response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) + response = self.client.v2[self.cluster_1, "mapping"].post(data=mapping_data) self.assertEqual(response.status_code, HTTP_201_CREATED) group_config.refresh_from_db() diff --git a/python/cm/services/host_group_common.py b/python/cm/services/host_group_common.py index 028c4a2e84..30fa2d0e5f 100644 --- a/python/cm/services/host_group_common.py +++ b/python/cm/services/host_group_common.py @@ -29,23 +29,31 @@ def remove_unmapped_hosts_from_groups(self, unmapped_hosts: MovedHosts) -> None: if not (unmapped_hosts.services or unmapped_hosts.components): return - hosts_in_service_groups = Q( - Q(**{f"{self.group_hosts_field_name}__object_type": ContentType.objects.get_for_model(ClusterObject)}), - self._combine_with_or( - Q(host_id__in=hosts, **{f"{self.group_hosts_field_name}__object_id": service_id}) - for service_id, hosts in unmapped_hosts.services.items() - ), - ) - - hosts_in_component_groups = Q( - Q(**{f"{self.group_hosts_field_name}__object_type": ContentType.objects.get_for_model(ServiceComponent)}), - self._combine_with_or( - Q(host_id__in=hosts, **{f"{self.group_hosts_field_name}__object_id": component_id}) - for component_id, hosts in unmapped_hosts.components.items() - ), - ) - - self.group_hosts_model.objects.filter(hosts_in_service_groups | hosts_in_component_groups).delete() + hosts_in_service_groups, hosts_in_component_groups = Q(), Q() + object_type = f"{self.group_hosts_field_name}__object_type" + object_id = f"{self.group_hosts_field_name}__object_id" + + if unmapped_hosts.services: + hosts_in_service_groups = Q( + Q(**{object_type: ContentType.objects.get_for_model(ClusterObject)}), + self._combine_with_or( + Q(host_id__in=hosts, **{object_id: service_id}) + for service_id, hosts in 
unmapped_hosts.services.items() + ), + ) + + if unmapped_hosts.components: + hosts_in_component_groups = Q( + Q(**{object_type: ContentType.objects.get_for_model(ServiceComponent)}), + self._combine_with_or( + Q(host_id__in=hosts, **{object_id: component_id}) + for component_id, hosts in unmapped_hosts.components.items() + ), + ) + + self.group_hosts_model.objects.filter( + self._combine_with_or(filter(bool, [hosts_in_service_groups, hosts_in_component_groups])) + ).delete() def _combine_with_or(self, clauses: Iterable[Q]) -> Q: return reduce(or_, clauses, Q()) From 6698011bf41811dafa4014adc25685ab3d766650 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Thu, 19 Sep 2024 08:02:53 +0000 Subject: [PATCH 68/98] ADCM-5965 Rework cluster mapping save AND some of concern checks --- python/adcm/tests/base.py | 17 +- .../executors/change_maintenance_mode.py | 4 +- .../executors/delete_service.py | 27 +- .../ansible_plugin/executors/hostcomponent.py | 13 +- python/api/cluster/serializers.py | 28 +- python/api/tests/test_api.py | 58 ++- python/api/tests/test_service.py | 42 -- python/api_v2/cluster/data_containers.py | 152 ------ python/api_v2/cluster/utils.py | 361 +------------ python/api_v2/cluster/views.py | 40 +- python/api_v2/service/utils.py | 4 +- .../bundles/bugs/ADCM-5965/new/config.yaml | 37 ++ .../bundles/bugs/ADCM-5965/old/config.yaml | 12 + python/api_v2/tests/test_mapping.py | 44 +- python/api_v2/tests/test_upgrade.py | 31 +- python/cm/api.py | 162 +----- python/cm/issue.py | 51 +- python/cm/services/cluster.py | 12 +- python/cm/services/concern/_operaitons.py | 16 +- python/cm/services/concern/cases.py | 8 +- python/cm/services/concern/checks.py | 484 +++--------------- python/cm/services/concern/distribution.py | 30 +- python/cm/services/concern/locks.py | 13 + python/cm/services/concern/repo.py | 82 +++ python/cm/services/job/action.py | 50 +- python/cm/services/job/checks.py | 114 ++--- python/cm/services/job/inventory/_base.py | 7 +- 
.../cm/services/job/run/_target_factories.py | 15 +- .../cm/services/job/run/_task_finalizers.py | 35 +- python/cm/services/mapping.py | 177 +++++++ python/cm/services/status/notify.py | 4 +- python/cm/status_api.py | 6 +- python/cm/tests/test_action.py | 18 +- python/cm/tests/test_cluster.py | 6 +- python/cm/tests/test_hc.py | 10 +- .../cm/tests/test_inventory/test_inventory.py | 29 +- python/cm/tests/test_requires.py | 17 +- python/cm/tests/test_upgrade.py | 11 +- python/cm/upgrade.py | 19 +- python/core/cluster/operations.py | 31 ++ python/core/cluster/types.py | 13 +- python/core/concern/__init__.py | 11 + python/core/concern/checks/__init__.py | 16 + python/core/concern/checks/_mapping.py | 188 +++++++ .../core/concern/checks/_service_requires.py | 48 ++ python/core/concern/types.py | 88 ++++ python/core/converters.py | 20 + python/rbac/tests/test_policy/test_rbac.py | 81 +-- 48 files changed, 1237 insertions(+), 1505 deletions(-) delete mode 100644 python/api_v2/cluster/data_containers.py create mode 100644 python/api_v2/tests/bundles/bugs/ADCM-5965/new/config.yaml create mode 100644 python/api_v2/tests/bundles/bugs/ADCM-5965/old/config.yaml create mode 100644 python/cm/services/concern/repo.py create mode 100644 python/cm/services/mapping.py create mode 100644 python/core/concern/__init__.py create mode 100644 python/core/concern/checks/__init__.py create mode 100644 python/core/concern/checks/_mapping.py create mode 100644 python/core/concern/checks/_service_requires.py create mode 100644 python/core/concern/types.py create mode 100644 python/core/converters.py diff --git a/python/adcm/tests/base.py b/python/adcm/tests/base.py index 9002adcf1c..b662705132 100644 --- a/python/adcm/tests/base.py +++ b/python/adcm/tests/base.py @@ -23,7 +23,7 @@ from api_v2.generic.config.utils import convert_adcm_meta_to_attr, convert_attr_to_adcm_meta from api_v2.prototype.utils import accept_license from api_v2.service.utils import bulk_add_services_to_cluster -from cm.api 
import add_cluster, add_hc, add_host, add_host_provider, add_host_to_cluster, update_obj_config +from cm.api import add_cluster, add_host, add_host_provider, add_host_to_cluster, update_obj_config from cm.bundle import prepare_bundle, process_file from cm.converters import orm_object_to_core_type from cm.models import ( @@ -45,7 +45,9 @@ ServiceComponent, ) from cm.services.job.prepare import prepare_task_for_action +from cm.services.mapping import change_host_component_mapping from cm.utils import deep_merge +from core.cluster.types import HostComponentEntry from core.job.dto import TaskPayloadDTO from core.job.types import Task from core.rbac.dto import UserCreateDTO @@ -488,13 +490,14 @@ def add_services_to_cluster(service_names: list[str], cluster: Cluster) -> Query @staticmethod def set_hostcomponent(cluster: Cluster, entries: Iterable[tuple[Host, ServiceComponent]]) -> list[HostComponent]: - return add_hc( - cluster=cluster, - hc_in=[ - {"host_id": host.pk, "component_id": component.pk, "service_id": component.service_id} - for host, component in entries - ], + change_host_component_mapping( + cluster_id=cluster.id, + bundle_id=cluster.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=host.id, component_id=component.id) for host, component in entries + ), ) + return list(HostComponent.objects.filter(cluster_id=cluster.id)) @staticmethod def get_non_existent_pk(model: type[ADCMEntity | ADCMModel | User | Role | Group | Policy]): diff --git a/python/ansible_plugin/executors/change_maintenance_mode.py b/python/ansible_plugin/executors/change_maintenance_mode.py index adab145a5d..e5c272a777 100644 --- a/python/ansible_plugin/executors/change_maintenance_mode.py +++ b/python/ansible_plugin/executors/change_maintenance_mode.py @@ -14,7 +14,7 @@ from typing import Any, Collection from cm.models import Host, MaintenanceMode -from cm.services.cluster import retrieve_clusters_topology +from cm.services.cluster import retrieve_cluster_topology from 
cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_objects_in_mm from cm.status_api import send_object_update_event @@ -100,7 +100,7 @@ def __call__( if not value: # In terms of concerns CHANGING and ON is the same, # so recalculation is required only for turning it OFF - redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((target_object.cluster_id,)))) + redistribute_issues_and_flags(topology=retrieve_cluster_topology(target_object.cluster_id)) with suppress(Exception): send_object_update_event(object_=target_object, changes={"maintenanceMode": target_object.maintenance_mode}) diff --git a/python/ansible_plugin/executors/delete_service.py b/python/ansible_plugin/executors/delete_service.py index 505e93da5d..18d2b0e505 100644 --- a/python/ansible_plugin/executors/delete_service.py +++ b/python/ansible_plugin/executors/delete_service.py @@ -12,8 +12,10 @@ from typing import Collection -from cm.api import delete_service, save_hc -from cm.models import ClusterBind, ClusterObject, HostComponent +from cm.api import delete_service +from cm.models import ClusterBind, ClusterObject, HostComponent, Prototype +from cm.services.mapping import change_host_component_mapping +from core.cluster.types import HostComponentEntry from core.types import ADCMCoreType, CoreObjectDescriptor from django.db.transaction import atomic @@ -67,15 +69,18 @@ def __call__( ) with atomic(): - # clean up hc - new_hc_list = [ - (hostcomponent.service, hostcomponent.host, hostcomponent.component) - for hostcomponent in HostComponent.objects.filter(cluster=service.cluster) - .exclude(service=service) - .select_related("host", "service", "component") - .order_by("id") - ] - save_hc(service.cluster, new_hc_list) + bundle_id = Prototype.objects.values_list("bundle_id", flat=True).get(id=service.prototype_id) + change_host_component_mapping( + cluster_id=service.cluster_id, + bundle_id=bundle_id, + flat_mapping=( + 
HostComponentEntry(**entry) + for entry in HostComponent.objects.values("host_id", "component_id") + .filter(cluster=service.cluster) + .exclude(service=service) + ), + skip_checks=True, + ) # remove existing binds ClusterBind.objects.filter(source_service=service).delete() diff --git a/python/ansible_plugin/executors/hostcomponent.py b/python/ansible_plugin/executors/hostcomponent.py index 321f95a7d0..00110a5393 100644 --- a/python/ansible_plugin/executors/hostcomponent.py +++ b/python/ansible_plugin/executors/hostcomponent.py @@ -12,8 +12,10 @@ from typing import Any, Collection, Literal -from cm.api import add_hc, get_hc +from cm.api import get_hc from cm.models import Cluster, Host, JobLog, ServiceComponent +from cm.services.mapping import change_host_component_mapping +from core.cluster.types import HostComponentEntry from core.types import ADCMCoreType, CoreObjectDescriptor from pydantic import field_validator @@ -121,6 +123,13 @@ def __call__( hostcomponent.remove(item) - add_hc(cluster, hostcomponent) + change_host_component_mapping( + cluster_id=cluster.id, + bundle_id=cluster.prototype.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in hostcomponent + ), + ) return CallResult(value=None, changed=True, error=None) diff --git a/python/api/cluster/serializers.py b/python/api/cluster/serializers.py index d830d7b2d2..86a6bd3ca6 100644 --- a/python/api/cluster/serializers.py +++ b/python/api/cluster/serializers.py @@ -12,14 +12,16 @@ from adcm.serializers import EmptySerializer from cm.adcm_config.config import get_main_info -from cm.api import add_cluster, add_hc, bind, multi_bind +from cm.api import add_cluster, bind, multi_bind from cm.errors import AdcmEx from cm.issue import update_hierarchy_issues -from cm.models import Action, Cluster, Host, Prototype, ServiceComponent +from cm.models import Action, Cluster, Host, HostComponent, Prototype, ServiceComponent from cm.schemas import 
RequiresUISchema +from cm.services.mapping import change_host_component_mapping from cm.status_api import get_cluster_status, get_hc_status from cm.upgrade import get_upgrade from cm.validators import ClusterUniqueValidator, StartMidEndValidator +from core.cluster.types import HostComponentEntry from django.conf import settings from rest_framework.exceptions import ValidationError from rest_framework.serializers import ( @@ -310,6 +312,8 @@ def validate_hc(hostcomponent): if not isinstance(hostcomponent, list): raise AdcmEx("INVALID_INPUT", "hc field should be a list") + added = set() + for item in hostcomponent: for key in ("component_id", "host_id", "service_id"): if key not in item: @@ -317,12 +321,30 @@ def validate_hc(hostcomponent): raise AdcmEx("INVALID_INPUT", msg.format(key)) + entry = tuple(item.values()) + if entry in added: + raise AdcmEx( + "INVALID_INPUT", + msg=f"duplicated entry: {item}", + ) + + added.add(entry) + return hostcomponent def create(self, validated_data): hostcomponent = validated_data.get("hc") + new_mapping_entries = tuple( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) for entry in hostcomponent + ) + + cluster = self.context.get("cluster") + + change_host_component_mapping( + cluster_id=cluster.id, bundle_id=cluster.prototype.bundle_id, flat_mapping=new_mapping_entries + ) - return add_hc(self.context.get("cluster"), hostcomponent) + return HostComponent.objects.filter(cluster_id=cluster.id) class HCComponentSerializer(ComponentShortSerializer): diff --git a/python/api/tests/test_api.py b/python/api/tests/test_api.py index deecbfbf90..403deee6f6 100755 --- a/python/api/tests/test_api.py +++ b/python/api/tests/test_api.py @@ -10,11 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Iterable from unittest.mock import patch from uuid import uuid4 from adcm.tests.base import APPLICATION_JSON, BaseTestCase -from cm.api import save_hc from cm.hierarchy import Tree from cm.issue import lock_affected_objects from cm.models import ( @@ -28,6 +28,7 @@ Prototype, ServiceComponent, ) +from cm.services.mapping import change_host_component_mapping from cm.tests.utils import ( gen_adcm, gen_component, @@ -39,6 +40,7 @@ gen_service, gen_task_log, ) +from core.cluster.types import HostComponentEntry from django.urls import reverse from django.utils import timezone from rest_framework.response import Response @@ -505,13 +507,13 @@ def test_hostcomponent(self): adh_bundle_id, cluster_proto = self.get_cluster_proto_id() ssh_bundle_id, _, host_id = self.get_host_in_cluster(self.host) service_proto_id = self.get_service_proto_id() - response: Response = self.client.post( + response = self.client.post( path=reverse(viewname="v1:cluster"), data={"name": self.cluster, "prototype_id": cluster_proto}, ) cluster_id = response.json()["id"] - response: Response = self.client.post( + response = self.client.post( path=reverse(viewname="v1:service", kwargs={"cluster_id": cluster_id}), data={"prototype_id": service_proto_id}, ) @@ -521,41 +523,41 @@ def test_hostcomponent(self): service_id = response.json()["id"] hc_url = reverse(viewname="v1:host-component", kwargs={"cluster_id": cluster_id}) - response: Response = self.client.post(hc_url, {"hc": {}}, content_type=APPLICATION_JSON) + response = self.client.post(hc_url, {"hc": {}}, content_type=APPLICATION_JSON) self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) self.assertEqual(response.json()["code"], "INVALID_INPUT") self.assertEqual(response.json()["desc"], "hc field is required") comp_id = self.get_component_id(cluster_id, service_id, self.component) - response: Response = self.client.post( + response = self.client.post( hc_url, {"hc": [{"service_id": service_id, "host_id": 100500, 
"component_id": comp_id}]}, content_type=APPLICATION_JSON, ) - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertEqual(response.json()["code"], "HOST_NOT_FOUND") - response: Response = self.client.post( + response = self.client.post( hc_url, {"hc": [{"service_id": service_id, "host_id": host_id, "component_id": 100500}]}, content_type=APPLICATION_JSON, ) - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertEqual(response.json()["code"], "COMPONENT_NOT_FOUND") - response: Response = self.client.post( + response = self.client.post( hc_url, {"hc": [{"service_id": service_id, "host_id": host_id, "component_id": comp_id}]}, content_type=APPLICATION_JSON, ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) - self.assertEqual(response.json()["code"], "FOREIGN_HOST") + self.assertEqual(response.json()["code"], "HOST_NOT_FOUND") - response: Response = self.client.post( + response = self.client.post( path=reverse(viewname="v1:host", kwargs={"cluster_id": cluster_id}), data={"host_id": host_id}, content_type=APPLICATION_JSON, @@ -563,7 +565,7 @@ def test_hostcomponent(self): self.assertEqual(response.status_code, HTTP_201_CREATED) - response: Response = self.client.post( + response = self.client.post( hc_url, {"hc": {"host_id": host_id, "component_id": comp_id}}, content_type=APPLICATION_JSON, @@ -573,7 +575,7 @@ def test_hostcomponent(self): self.assertEqual(response.json()["code"], "INVALID_INPUT") self.assertEqual(response.json()["desc"], "hc field should be a list") - response: Response = self.client.post( + response = self.client.post( hc_url, {"hc": [{"component_id": comp_id}]}, content_type=APPLICATION_JSON, @@ -582,12 +584,12 @@ def test_hostcomponent(self): self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) self.assertEqual(response.json()["code"], "INVALID_INPUT") - response: Response = 
self.client.post(hc_url, {"hc": [{"host_id": host_id}]}, content_type=APPLICATION_JSON) + response = self.client.post(hc_url, {"hc": [{"host_id": host_id}]}, content_type=APPLICATION_JSON) self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST) self.assertEqual(response.json()["code"], "INVALID_INPUT") - response: Response = self.client.post( + response = self.client.post( hc_url, { "hc": [ @@ -638,8 +640,8 @@ def test_hostcomponent(self): content_type=APPLICATION_JSON, ) - self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) - self.assertEqual(response.json()["code"], "CLUSTER_SERVICE_NOT_FOUND") + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertEqual(response.json()["code"], "COMPONENT_NOT_FOUND") response: Response = self.client.post( path=reverse(viewname="v1:service", kwargs={"cluster_id": cluster_id2}), @@ -658,7 +660,7 @@ def test_hostcomponent(self): ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) - self.assertEqual(response.json()["code"], "FOREIGN_HOST") + self.assertEqual(response.json()["code"], "HOST_NOT_FOUND") response: Response = self.client.delete(f"{hc_url}{str(hs_id)}/") @@ -887,7 +889,19 @@ def setUp(self): state="installed", ) - @patch("cm.api.reset_hc_map") + def save_hc( + self, cluster: Cluster, hc_list: Iterable[tuple[ClusterObject, Host, ServiceComponent]] + ) -> list[HostComponent]: + change_host_component_mapping( + cluster_id=cluster.id, + bundle_id=cluster.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=host.id, component_id=component.id) for (_, host, component) in hc_list + ), + ) + return list(HostComponent.objects.filter(cluster=cluster)) + + @patch("cm.services.mapping.reset_hc_map") def test_save_hc(self, mock_reset_hc_map): cluster_object = ClusterObject.objects.create(prototype=self.service_prototype, cluster=self.cluster) host = Host.objects.create(prototype=self.cluster_prototype, cluster=self.cluster) @@ -911,7 +925,7 @@ def test_save_hc(self, mock_reset_hc_map): ) 
host_comp_list = [(cluster_object, host, service_component)] - hc_list = save_hc(self.cluster, host_comp_list) + hc_list = self.save_hc(self.cluster, host_comp_list) self.assertListEqual(hc_list, [HostComponent.objects.first()]) @@ -967,7 +981,7 @@ def test_save_hc__big_update__locked_hierarchy( (service, host_1, component_1), (service, host_3, component_2), ] - save_hc(self.cluster, new_hc_list) + self.save_hc(self.cluster, new_hc_list) # refresh due to new instances were updated in save_hc() host_1.refresh_from_db() @@ -1017,7 +1031,7 @@ def test_save_hc__big_update__unlocked_hierarchy(self, mock_update, mock_load): (service, host_1, component_1), (service, host_3, component_2), ] - save_hc(self.cluster, new_hc_list) + self.save_hc(self.cluster, new_hc_list) # refresh due to new instances were updated in save_hc() host_1.refresh_from_db() diff --git a/python/api/tests/test_service.py b/python/api/tests/test_service.py index d0055a6edb..63c8b764a5 100644 --- a/python/api/tests/test_service.py +++ b/python/api/tests/test_service.py @@ -287,48 +287,6 @@ def test_delete_with_action_not_created_state(self): def test_upload_with_cyclic_requires(self): self.upload_and_load_bundle(path=Path(self.base_dir, "python/api/tests/files/bundle_cluster_requires.tar")) - def test_delete_service_with_requires_fail(self): - host = self.get_host(bundle_path="python/api/tests/files/bundle_test_provider_concern.tar") - cluster = self.get_cluster(bundle_path="python/api/tests/files/bundle_cluster_requires.tar") - self.client.post( - path=reverse(viewname="v1:host", kwargs={"cluster_id": cluster.pk}), - data={"host_id": host.pk}, - ) - - service_1_prototype = Prototype.objects.get(name="service_1", type="service") - service_1_response: Response = self.client.post( - path=reverse(viewname="v1:service", kwargs={"cluster_id": cluster.pk}), - data={"prototype_id": service_1_prototype.pk}, - ) - service_1 = ClusterObject.objects.get(pk=service_1_response.data["id"]) - - service_2_prototype = 
Prototype.objects.get(name="service_2", type="service") - service_2_response: Response = self.client.post( - path=reverse(viewname="v1:service", kwargs={"cluster_id": cluster.pk}), - data={"prototype_id": service_2_prototype.pk}, - ) - service_2 = ClusterObject.objects.get(pk=service_2_response.data["id"]) - - component_2_1 = ServiceComponent.objects.get(service=service_2, prototype__name="component_1") - component_1_1 = ServiceComponent.objects.get(service=service_1, prototype__name="component_1") - - self.client.post( - path=reverse(viewname="v1:host-component", kwargs={"cluster_id": cluster.pk}), - data={ - "hc": [ - {"service_id": service_2.pk, "component_id": component_2_1.pk, "host_id": host.pk}, - {"service_id": service_1.pk, "component_id": component_1_1.pk, "host_id": host.pk}, - ], - }, - content_type=APPLICATION_JSON, - ) - - response: Response = self.client.delete( - path=reverse(viewname="v1:service-details", kwargs={"service_id": service_1.pk}) - ) - - self.assertEqual(response.status_code, HTTP_409_CONFLICT) - def test_delete_required_fail(self): self.service.prototype.required = True self.service.prototype.save(update_fields=["required"]) diff --git a/python/api_v2/cluster/data_containers.py b/python/api_v2/cluster/data_containers.py deleted file mode 100644 index ba31683292..0000000000 --- a/python/api_v2/cluster/data_containers.py +++ /dev/null @@ -1,152 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from dataclasses import dataclass -from functools import cached_property -from typing import Any, Literal - -from cm.data_containers import ( - ClusterData, - ComponentData, - HostComponentData, - HostData, - PrototypeData, - ServiceData, -) -from cm.models import Host - - -@dataclass -class MappingEntryData: - host: HostData - component: ComponentData - service: ServiceData - - -@dataclass -class MappingData: - cluster: ClusterData - services: dict[int, ServiceData] - components: dict[int, ComponentData] - hosts: dict[int, HostData] - prototypes: dict[int, PrototypeData] - mapping: list[MappingEntryData] - existing_mapping: list[HostComponentData] - orm_objects: dict[Literal["hosts", "cluster", "providers"], dict[int, Any] | Any] - not_found_object_ids: dict[Literal["hosts", "components"], set] - existing_services_names: list[str] - - @cached_property - def mapping_difference(self) -> dict[Literal["add", "remove", "remain"], list[MappingEntryData]]: - input_mapping_ids = {(map_.host.id, map_.component.id, map_.service.id) for map_ in self.mapping} - existing_mapping_ids = {(map_.host_id, map_.component_id, map_.service_id) for map_ in self.existing_mapping} - - return { - "add": [ - MappingEntryData( - host=self.hosts[ids[0]], component=self.components[ids[1]], service=self.services[ids[2]] - ) - for ids in input_mapping_ids.difference(existing_mapping_ids) - ], - "remove": [ - MappingEntryData( - host=self.hosts[ids[0]], component=self.components[ids[1]], service=self.services[ids[2]] - ) - for ids in existing_mapping_ids.difference(input_mapping_ids) - ], - "remain": [ - MappingEntryData( - host=self.hosts[ids[0]], component=self.components[ids[1]], service=self.services[ids[2]] - ) - for ids in input_mapping_ids.intersection(existing_mapping_ids) - ], - } - - @cached_property - def mapping_prototypes(self) -> list[dict[Literal["service", "component"], PrototypeData]]: - return [ - { - "service": self.prototypes[map_.service.prototype_id], - "component": 
self.prototypes[map_.component.prototype_id], - } - for map_ in self.mapping - ] - - @cached_property - def objects_by_prototype_name( - self, - ) -> dict[ - Literal["services", "components"], - dict[str, dict[Literal["object", "prototype"], ServiceData | ComponentData | PrototypeData]], - ]: - return { - "components": { - self.prototypes[obj.prototype_id].name: {"object": obj, "prototype": self.prototypes[obj.prototype_id]} - for obj in self.components.values() - }, - "services": { - self.prototypes[obj.prototype_id].name: {"object": obj, "prototype": self.prototypes[obj.prototype_id]} - for obj in self.services.values() - }, - } - - @cached_property - def added_hosts(self) -> list[Host]: - existing_host_ids = {map_.host_id for map_ in self.existing_mapping} - - return [ - self.orm_objects["hosts"][map_.host.id] for map_ in self.mapping if map_.host.id not in existing_host_ids - ] - - @cached_property - def removed_hosts(self) -> list[Host]: - mapping_host_ids = {map_.host.id for map_ in self.mapping} - - return [ - self.orm_objects["hosts"][map_.host_id] - for map_ in self.existing_mapping - if map_.host_id not in mapping_host_ids - ] - - def entry_prototypes(self, entry: MappingEntryData) -> tuple[PrototypeData, PrototypeData]: - service_prototype = self.prototypes[entry.service.prototype_id] - component_prototype = self.prototypes[entry.component.prototype_id] - - return service_prototype, component_prototype - - def get_bound_entries(self, entry: MappingEntryData) -> list[MappingEntryData]: - _, component_prototype = self.entry_prototypes(entry=entry) - bound_to = component_prototype.bound_to - - bound_targets: list[MappingEntryData] = [] - for mapping_entry in self.mapping: - service_prototype, component_prototype = self.entry_prototypes(entry=mapping_entry) - if all( - ( - service_prototype.name == bound_to.service, - component_prototype.name == bound_to.component, - ) - ): - bound_targets.append(mapping_entry) - - return bound_targets - - def 
service_components(self, service: ServiceData) -> list[tuple[ComponentData, PrototypeData]]: - service_prototype = self.prototypes[service.prototype_id] - - target_components = [] - for component in self.components.values(): - component_prototype = self.prototypes[component.prototype_id] - if component_prototype.parent_id == service_prototype.id: - target_components.append((component, component_prototype)) - - return target_components diff --git a/python/api_v2/cluster/utils.py b/python/api_v2/cluster/utils.py index b4a79ef9f2..c4c6d3486d 100644 --- a/python/api_v2/cluster/utils.py +++ b/python/api_v2/cluster/utils.py @@ -11,53 +11,9 @@ # limitations under the License. from collections import defaultdict -from itertools import chain -from typing import Literal -from cm.data_containers import ( - ClusterData, - ComponentData, - Empty, - HostComponentData, - HostData, - PrototypeData, - RequiresData, - ServiceData, -) -from cm.errors import AdcmEx -from cm.issue import ( - add_concern_to_object, - remove_concern_from_object, -) -from cm.models import ( - Cluster, - ClusterObject, - ConcernCause, - Host, - HostComponent, - MaintenanceMode, - ObjectType, - Prototype, - ServiceComponent, -) -from cm.services.action_host_group import ActionHostGroupRepo -from cm.services.cluster import retrieve_clusters_topology -from cm.services.concern import delete_issue -from cm.services.concern.checks import extract_data_for_requirements_check, is_constraint_requirements_unsatisfied -from cm.services.concern.distribution import redistribute_issues_and_flags -from cm.services.concern.locks import get_lock_on_object -from cm.services.group_config import ConfigHostGroupRepo -from cm.services.status.notify import reset_hc_map, reset_objects_in_mm -from cm.status_api import send_host_component_map_update_event -from core.cluster.operations import find_hosts_difference -from core.types import ADCMCoreType, CoreObjectDescriptor -from django.contrib.contenttypes.models import ContentType 
-from django.db.models import QuerySet -from django.db.transaction import atomic, on_commit -from rbac.models import Policy -from rest_framework.status import HTTP_409_CONFLICT +from cm.models import ObjectType, Prototype -from api_v2.cluster.data_containers import MappingData, MappingEntryData from api_v2.prototype.utils import get_license_text @@ -125,318 +81,3 @@ def get_depend_on( get_depend_on(prototype=required_service, depend_on=depend_on, checked_objects=checked_objects) return depend_on - - -def retrieve_mapping_data( - cluster: Cluster, plain_hc: list[dict[Literal["host_id", "component_id"], int]] -) -> MappingData: - mapping_data = { - "cluster": ClusterData.model_validate(obj=cluster), - "services": {}, - "components": {}, - "hosts": {}, - "prototypes": {}, - "mapping": [], - "existing_mapping": [], - "orm_objects": {"cluster": cluster, "hosts": {}, "providers": {}}, - "not_found_object_ids": {}, - "existing_services_names": [], - } - - for service in ( - ClusterObject.objects.filter(cluster=cluster) - .select_related("prototype") - .prefetch_related("servicecomponent_set", "servicecomponent_set__prototype") - ): - service: ClusterObject - - mapping_data["existing_services_names"].append(service.prototype.name) - mapping_data["services"][service.pk] = ServiceData.model_validate(obj=service) - mapping_data["prototypes"][service.prototype.pk] = PrototypeData.model_validate(obj=service.prototype) - for component in service.servicecomponent_set.all(): - component: ServiceComponent - mapping_data["components"][component.pk] = ComponentData.model_validate(obj=component) - mapping_data["prototypes"][component.prototype.pk] = PrototypeData.model_validate(obj=component.prototype) - - for host in Host.objects.filter(cluster=cluster).select_related("provider"): - host: Host - mapping_data["hosts"][host.pk] = HostData.model_validate(obj=host) - mapping_data["orm_objects"]["hosts"][host.pk] = host - mapping_data["orm_objects"]["providers"][host.provider.pk] = 
host.provider - - for map_ in HostComponent.objects.filter(cluster=cluster): - mapping_data["existing_mapping"].append(HostComponentData.model_validate(obj=map_)) - - mapping_data = MappingData(**mapping_data) - - mapping_data.mapping = [ - MappingEntryData( - host=mapping_data.hosts[record["host_id"]], - component=mapping_data.components[record["component_id"]], - service=mapping_data.services[mapping_data.components[record["component_id"]].service_id], - ) - for record in plain_hc - if record["host_id"] in mapping_data.hosts and record["component_id"] in mapping_data.components - ] - mapping_data.not_found_object_ids = { - "hosts": {record["host_id"] for record in plain_hc if record["host_id"] not in mapping_data.hosts}, - "components": { - record["component_id"] for record in plain_hc if record["component_id"] not in mapping_data.components - }, - } - - return mapping_data - - -def save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: - """ - Save given hosts-components mapping if all sanity checks pass - """ - - _check_mapping_data(mapping_data=mapping_data) - return _save_mapping(mapping_data=mapping_data) - - -def _check_mapping_data(mapping_data: MappingData) -> None: - if mapping_data.not_found_object_ids["hosts"]: - ids_repr = ", ".join([f'"{host_id}"' for host_id in mapping_data.not_found_object_ids["hosts"]]) - raise AdcmEx( - code="HOST_NOT_FOUND", - http_code=HTTP_409_CONFLICT, - msg=f'Host(s) {ids_repr} do not belong to cluster "{mapping_data.cluster.name}"', - ) - if mapping_data.not_found_object_ids["components"]: - ids_repr = ", ".join([f'"{component_id}"' for component_id in mapping_data.not_found_object_ids["components"]]) - raise AdcmEx( - code="COMPONENT_NOT_FOUND", - http_code=HTTP_409_CONFLICT, - msg=f'Component(s) {ids_repr} do not belong to cluster "{mapping_data.cluster.name}"', - ) - - seen = set() - duplicates = set() - for map_ in mapping_data.mapping: - ids = (map_.host.id, map_.component.id, map_.service.id) - if ids in 
seen: - duplicates.add(ids) - seen.add(ids) - - if duplicates: - error_mapping_repr = ", ".join(f"component {map_ids[1]} - host {map_ids[0]}" for map_ids in sorted(duplicates)) - raise AdcmEx(code="INVALID_INPUT", msg=f"Mapping entries duplicates found: {error_mapping_repr}.") - - hosts_mm_states_in_add_remove_groups = { - diff.host.maintenance_mode for diff in mapping_data.mapping_difference["add"] - } | {diff.host.maintenance_mode for diff in mapping_data.mapping_difference["remove"]} - if MaintenanceMode.ON.value in hosts_mm_states_in_add_remove_groups: - raise AdcmEx("INVALID_HC_HOST_IN_MM") - - for mapping_entry in mapping_data.mapping: - service_prototype, component_prototype = mapping_data.entry_prototypes(entry=mapping_entry) - - if service_prototype.requires or component_prototype.requires: - _check_single_mapping_requires(mapping_entry=mapping_entry, mapping_data=mapping_data) - - if not isinstance(component_prototype.bound_to, Empty): - _check_single_mapping_bound_to(mapping_entry=mapping_entry, mapping_data=mapping_data) - - requirements_data = extract_data_for_requirements_check( - cluster=mapping_data.orm_objects["cluster"], - input_mapping=[ - {"host_id": entry.host.id, "component_id": entry.component.id, "service_id": entry.service.id} - for entry in mapping_data.mapping - ], - ) - constraint_not_ok, error_message = is_constraint_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - components_map=requirements_data.objects_map_by_type["component"], - ) - if constraint_not_ok and error_message is not None: - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) - - for service in mapping_data.services.values(): - service_prototype = mapping_data.prototypes[service.prototype_id] - if service_prototype.requires: - _check_single_service_requires( - service_prototype=service_prototype, 
cluster_objects=mapping_data.objects_by_prototype_name - ) - - -@atomic -def _save_mapping(mapping_data: MappingData) -> QuerySet[HostComponent]: - original_topology = next(retrieve_clusters_topology(cluster_ids=(mapping_data.cluster.id,))) - - on_commit(func=reset_hc_map) - on_commit(func=reset_objects_in_mm) - - cluster = mapping_data.orm_objects["cluster"] - - lock = get_lock_on_object(object_=cluster) - if lock: - for removed_host in mapping_data.removed_hosts: - remove_concern_from_object(object_=removed_host, concern=lock) - - for added_host in mapping_data.added_hosts: - add_concern_to_object(object_=added_host, concern=lock) - - mapping_objects: list[HostComponent] = [] - for map_ in mapping_data.mapping: - mapping_objects.append( - HostComponent( - cluster_id=mapping_data.cluster.id, - host_id=map_.host.id, - service_id=map_.service.id, - component_id=map_.component.id, - ) - ) - - HostComponent.objects.filter(cluster_id=mapping_data.cluster.id).delete() - HostComponent.objects.bulk_create(objs=mapping_objects) - - updated_topology = next(retrieve_clusters_topology(cluster_ids=(mapping_data.cluster.id,))) - - unmapped_hosts = find_hosts_difference(old_topology=original_topology, new_topology=updated_topology).unmapped - ActionHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) - ConfigHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) - - delete_issue( - owner=CoreObjectDescriptor(id=mapping_data.cluster.id, type=ADCMCoreType.CLUSTER), - cause=ConcernCause.HOSTCOMPONENT, - ) - redistribute_issues_and_flags(topology=updated_topology) - - _handle_mapping_policies(mapping_data=mapping_data) - send_host_component_map_update_event(cluster=mapping_data.orm_objects["cluster"]) - - return HostComponent.objects.filter(cluster_id=mapping_data.cluster.id) - - -def _handle_mapping_policies(mapping_data: MappingData) -> None: - service_ids_in_mappings: set[int] = set( - chain( - (map_.service.id for map_ in mapping_data.mapping), - 
(map_.service_id for map_ in mapping_data.existing_mapping), - ) - ) - for policy in Policy.objects.filter( - object__object_id__in=service_ids_in_mappings, - object__content_type=ContentType.objects.get_for_model(model=ClusterObject), - ): - policy.apply() - - for policy in Policy.objects.filter( - object__object_id=mapping_data.cluster.id, - object__content_type=ContentType.objects.get_for_model(model=Cluster), - ): - policy.apply() - - -def _check_single_mapping_requires(mapping_entry: MappingEntryData, mapping_data: MappingData) -> None: - service_prototype, component_prototype = mapping_data.entry_prototypes(entry=mapping_entry) - - for require, source_type in [ - *zip(component_prototype.requires, [component_prototype.type] * len(component_prototype.requires)), - *zip(service_prototype.requires, [service_prototype.type] * len(service_prototype.requires)), - ]: - require: RequiresData - - if require.service not in mapping_data.existing_services_names: - if source_type == ObjectType.COMPONENT.value: - reference = f'component "{component_prototype.name}" of service "{service_prototype.name}"' - else: - reference = f'service "{service_prototype.name}"' - - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", msg=f'No required service "{require.service}" for {reference}' - ) - - if require.component is None: - continue - - if not [ - mapping_entry - for mapping_entry in mapping_data.mapping_prototypes - if mapping_entry[ObjectType.SERVICE.value].name == require.service - and mapping_entry[ObjectType.COMPONENT.value].name == require.component - ]: - if source_type == ObjectType.COMPONENT.value: - reference = f'component "{component_prototype.name}" of service "{service_prototype.name}"' - else: - reference = f'service "{service_prototype.name}"' - - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'No required component "{require.component}" of service "{require.service}" for {reference}', - ) - - -def _check_single_mapping_bound_to(mapping_entry: 
MappingEntryData, mapping_data: MappingData) -> None: - service_prototype, component_prototype = mapping_data.entry_prototypes(entry=mapping_entry) - bound_entries = mapping_data.get_bound_entries(entry=mapping_entry) - - if not bound_entries: - bound_target_ref = ( - f'component "{component_prototype.bound_to.component}" of service "{component_prototype.bound_to.service}"' - ) - bound_requester_ref = ( - f'component "{component_prototype.display_name}" of service "{service_prototype.display_name}"' - ) - - msg = f'No {bound_target_ref} on host "{mapping_entry.host.fqdn}" for {bound_requester_ref}' - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=msg) - - for bound_entry in bound_entries: - if not any( - map_ - for map_ in mapping_data.mapping - if map_.host.id == bound_entry.host.id and map_.component.prototype_id == component_prototype.id - ): - bound_target_ref = f'component "{component_prototype.name}" of service "{service_prototype.name}"' - requester_service_prototype, requester_component_prototype = mapping_data.entry_prototypes( - entry=bound_entry - ) - bound_requester_ref = ( - f'component "{requester_component_prototype.name}" of service "{requester_service_prototype.name}"' - ) - - raise AdcmEx( - code="COMPONENT_CONSTRAINT_ERROR", - msg=f'No {bound_target_ref} on host "{bound_entry.host.fqdn}" for {bound_requester_ref}', - ) - - -def _check_single_service_requires( - service_prototype: PrototypeData, - cluster_objects: dict[ - Literal["services", "components"], - dict[str, dict[Literal["object", "prototype"], ServiceData | ComponentData | PrototypeData]], - ], -) -> None: - for require in service_prototype.requires: - required_service: ServiceData | None = cluster_objects["services"].get(require.service, {}).get("object") - required_service_prototype: PrototypeData | None = ( - cluster_objects["services"].get(require.service, {}).get("prototype") - ) - - target_reference = f'service "{require.service}"' - is_requirements_satisfied: bool = 
required_service is not None and required_service_prototype is not None - - if require.component is not None: - required_component: ComponentData | None = ( - cluster_objects["components"].get(require.component, {}).get("object") - ) - required_component_prototype: PrototypeData | None = ( - cluster_objects["components"].get(require.component, {}).get("prototype") - ) - - if required_component is None or required_component_prototype is None: - target_reference = f'component "{require.component}" of service "{require.service}"' - is_requirements_satisfied = False - - if not is_requirements_satisfied: - raise AdcmEx( - code="SERVICE_CONFLICT", - msg=f"No required {target_reference} for {service_prototype.reference}", - ) diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index bd732e221c..972c705f8a 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -44,13 +44,16 @@ ) from cm.services.cluster import ( perform_host_to_cluster_map, + retrieve_cluster_topology, retrieve_clusters_objects_maintenance_mode, - retrieve_clusters_topology, ) +from cm.services.mapping import change_host_component_mapping from cm.services.status import notify from core.cluster.errors import HostAlreadyBoundError, HostBelongsToAnotherClusterError, HostDoesNotExistError -from core.cluster.operations import calculate_maintenance_mode_for_cluster_objects -from core.cluster.types import MaintenanceModeOfObjects +from core.cluster.operations import ( + calculate_maintenance_mode_for_cluster_objects, +) +from core.cluster.types import HostComponentEntry, MaintenanceModeOfObjects from django.contrib.contenttypes.models import ContentType from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin @@ -89,7 +92,6 @@ ServicePrototypeSerializer, SetMappingSerializer, ) -from api_v2.cluster.utils import retrieve_mapping_data, save_mapping from api_v2.component.serializers 
import ComponentMappingSerializer from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset @@ -426,10 +428,32 @@ def mapping(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG00 serializer = SetMappingSerializer(data=request.data, many=True) serializer.is_valid(raise_exception=True) - mapping_data = retrieve_mapping_data(cluster=cluster, plain_hc=serializer.validated_data) - new_mapping = save_mapping(mapping_data=mapping_data) - return Response(data=self.get_serializer(instance=new_mapping, many=True).data, status=HTTP_201_CREATED) + new_mapping_entries = tuple(HostComponentEntry(**entry) for entry in serializer.validated_data) + if len(new_mapping_entries) != len(set(new_mapping_entries)): + checked = set() + duplicates = set() + + for entry in new_mapping_entries: + if entry in checked: + duplicates.add(entry) + else: + checked.add(entry) + + error_mapping_repr = ", ".join( + f"component {entry.component_id} - host {entry.host_id}" for entry in sorted(duplicates) + ) + raise AdcmEx("INVALID_INPUT", msg=f"Mapping entries duplicates found: {error_mapping_repr}.") + + cluster_id = cluster.id + bundle_id = Prototype.objects.values_list("bundle_id", flat=True).get(id=cluster.prototype_id) + + change_host_component_mapping(cluster_id=cluster_id, bundle_id=bundle_id, flat_mapping=new_mapping_entries) + + return Response( + data=self.get_serializer(instance=HostComponent.objects.filter(cluster_id=cluster_id), many=True).data, + status=HTTP_201_CREATED, + ) @action( methods=["get"], @@ -462,7 +486,7 @@ def mapping_components(self, request: Request, *args, **kwargs): # noqa: ARG002 objects_mm = ( calculate_maintenance_mode_for_cluster_objects( - topology=next(retrieve_clusters_topology(cluster_ids=(cluster.id,))), + topology=retrieve_cluster_topology(cluster.id), own_maintenance_mode=retrieve_clusters_objects_maintenance_mode(cluster_ids=(cluster.id,)), ) if is_mm_available diff --git 
a/python/api_v2/service/utils.py b/python/api_v2/service/utils.py index bd2c7a465a..a0933cc412 100644 --- a/python/api_v2/service/utils.py +++ b/python/api_v2/service/utils.py @@ -24,7 +24,7 @@ Prototype, ServiceComponent, ) -from cm.services.cluster import retrieve_clusters_topology +from cm.services.cluster import retrieve_cluster_topology from cm.services.concern.cases import recalculate_own_concerns_on_add_services from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_hc_map @@ -59,7 +59,7 @@ def bulk_add_services_to_cluster(cluster: Cluster, prototypes: QuerySet[Prototyp "servicecomponent_set" ).all(), # refresh values from db to update `config` field ) - redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) + redistribute_issues_and_flags(topology=retrieve_cluster_topology(cluster.pk)) re_apply_object_policy(apply_object=cluster) reset_hc_map() diff --git a/python/api_v2/tests/bundles/bugs/ADCM-5965/new/config.yaml b/python/api_v2/tests/bundles/bugs/ADCM-5965/new/config.yaml new file mode 100644 index 0000000000..55f35d50b0 --- /dev/null +++ b/python/api_v2/tests/bundles/bugs/ADCM-5965/new/config.yaml @@ -0,0 +1,37 @@ +- name: cluster_with_constraints + type: cluster + version: '2.0' + upgrade: + - &upgrade + name: Simple upgrade + scripts: + - name: before + script: ./succeed.yaml + script_type: ansible + - name: switch + script: bundle_switch + script_type: internal + - name: after + script: ./succeed.yaml + script_type: ansible + states: + available: any + versions: + max: 1.9 + min: 0.5 + - <<: *upgrade + name: With HC + hc_acl: + - action: add + component: component + service: service_with_constraints + +- components: + component: + constraint: + - 1 + - + + dummy: {} + name: service_with_constraints + type: service + version: '1.0' diff --git a/python/api_v2/tests/bundles/bugs/ADCM-5965/old/config.yaml 
b/python/api_v2/tests/bundles/bugs/ADCM-5965/old/config.yaml new file mode 100644 index 0000000000..5377fc4e53 --- /dev/null +++ b/python/api_v2/tests/bundles/bugs/ADCM-5965/old/config.yaml @@ -0,0 +1,12 @@ +- name: cluster_with_constraints + type: cluster + version: '1.0' +- components: + component: + constraint: + - 0 + - 1 + dummy: {} + name: service_with_constraints + type: service + version: '1.0' diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index 00d328038f..da99e07845 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -389,7 +389,7 @@ def test_no_required_service_fail(self): self.assertDictEqual( response.json(), { - "code": "COMPONENT_CONSTRAINT_ERROR", + "code": "SERVICE_CONFLICT", "level": "error", "desc": f'No required service "service_required" for service "{service_requires_service.display_name}"', }, @@ -538,18 +538,9 @@ def test_no_bound_fail(self): ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) - self.assertDictEqual( - response.json(), - { - "code": "COMPONENT_CONSTRAINT_ERROR", - "level": "error", - "desc": ( - f'No component "bound_target_component" of service "bound_target_service" ' - f'on host "{self.host_1.fqdn}" for component "{bound_component.display_name}" ' - f'of service "{bound_component.service.display_name}"' - ), - }, - ) + data = response.json() + self.assertEqual(data["code"], "COMPONENT_CONSTRAINT_ERROR") + self.assertIn("Component `bound_to` restriction violated.", data["desc"]) self.assertEqual(HostComponent.objects.count(), 0) def test_bound_on_different_host_fail(self): @@ -579,19 +570,9 @@ def test_bound_on_different_host_fail(self): ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) - self.assertDictEqual( - response.json(), - { - "code": "COMPONENT_CONSTRAINT_ERROR", - "level": "error", - "desc": ( - f'No component "{bound_component.display_name}" of service ' - f'"{bound_component.service.display_name}" on host 
"{self.host_2.display_name}" for ' - f'component "{bound_target_component.display_name}" ' - f'of service "{bound_target_component.service.display_name}"' - ), - }, - ) + data = response.json() + self.assertEqual(data["code"], "COMPONENT_CONSTRAINT_ERROR") + self.assertIn("Component `bound_to` restriction violated.", data["desc"]) self.assertEqual(HostComponent.objects.count(), 0) def test_bound_success(self): @@ -1097,8 +1078,7 @@ def test_no_required_service_not_in_hc_fail(self): "code": "SERVICE_CONFLICT", "level": "error", "desc": ( - f'No required service "service_required" for service ' - f'"{service_requires_service.display_name}" {service_requires_service.prototype.version}' + f'No required service "service_required" for service ' f'"{service_requires_service.display_name}"' ), }, ) @@ -1190,11 +1170,9 @@ def test_save_mapping_with_unsatisfied_bound_to_fail(self) -> None: ) self.assertEqual(response.status_code, HTTP_409_CONFLICT) - self.assertIn( - 'No component "will_have_bound_to" of service "service_with_bound_to" on host "h1" ' - 'for component "component_1" of service "service_1"', - response.json()["desc"], - ) + data = response.json() + self.assertEqual(data["code"], "COMPONENT_CONSTRAINT_ERROR") + self.assertIn("Component `bound_to` restriction violated.", data["desc"]) class GroupConfigRelatedTests(BaseAPITestCase): diff --git a/python/api_v2/tests/test_upgrade.py b/python/api_v2/tests/test_upgrade.py index e9a175b693..d92d902144 100644 --- a/python/api_v2/tests/test_upgrade.py +++ b/python/api_v2/tests/test_upgrade.py @@ -24,7 +24,7 @@ from cm.tests.mocks.task_runner import RunTaskMock from init_db import init from rbac.upgrade.role import init_roles -from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND +from rest_framework.status import HTTP_200_OK, HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT from rest_framework.test import APITestCase from api_v2.prototype.utils import accept_license @@ -584,6 
+584,35 @@ def test_upgrade_adcm_3899_success(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertEqual(len(response.json()), 4) + def test_adcm_5965_no_constraints_check(self) -> None: + bundle = self.add_bundle(self.test_bundles_dir / "bugs" / "ADCM-5965" / "old") + new_bundle = self.add_bundle(self.test_bundles_dir / "bugs" / "ADCM-5965" / "new") + + cluster = self.add_cluster(bundle=bundle, name="Cluster For Upgrade") + service = self.add_services_to_cluster(["service_with_constraints"], cluster=cluster).get() + component = ServiceComponent.objects.get(prototype__name="dummy", service=service) + + host_1 = self.add_host(provider=self.provider, fqdn="host-1", cluster=cluster) + + self.set_hostcomponent(cluster=cluster, entries=((host_1, component),)) + + with self.subTest("No HC"): + upgrade = Upgrade.objects.get(name="Simple upgrade", bundle=new_bundle) + response = self.client.v2[cluster, "upgrades", upgrade, "run"].post() + + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertIn("COMPONENT_CONSTRAINT_ERROR", response.json()["code"]) + + with self.subTest("With HC"): + upgrade = Upgrade.objects.get(name="With HC", bundle=new_bundle) + # Passing the same HC unchanged, expect it to first be checked against upgrade restrictions + response = self.client.v2[cluster, "upgrades", upgrade, "run"].post( + data={"hostComponentMap": [{"hostId": host_1.id, "componentId": component.id}]} + ) + + self.assertEqual(response.status_code, HTTP_409_CONFLICT) + self.assertIn("COMPONENT_CONSTRAINT_ERROR", response.json()["code"]) + class TestAdcmUpgrade(APITestCase): @classmethod diff --git a/python/cm/api.py b/python/cm/api.py index 5824170e89..9719632f65 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -16,13 +16,12 @@ import json from adcm_version import compare_prototype_versions -from core.cluster.operations import find_hosts_difference from core.types import ADCMCoreType, CoreObjectDescriptor from django.conf import settings from 
django.contrib.contenttypes.models import ContentType from django.core.exceptions import MultipleObjectsReturned from django.db.transaction import atomic, on_commit -from rbac.models import Policy, re_apply_object_policy +from rbac.models import re_apply_object_policy from rbac.roles import apply_policy_for_new_config from cm.adcm_config.config import ( @@ -65,8 +64,7 @@ ServiceComponent, TaskLog, ) -from cm.services.action_host_group import ActionHostGroupRepo -from cm.services.cluster import retrieve_clusters_topology +from cm.services.cluster import retrieve_cluster_topology from cm.services.concern import create_issue, delete_issue, retrieve_issue from cm.services.concern._operaitons import delete_concerns_of_removed_objects from cm.services.concern.cases import ( @@ -75,23 +73,17 @@ recalculate_own_concerns_on_add_services, ) from cm.services.concern.checks import ( - cluster_mapping_has_issue, - extract_data_for_requirements_check, - is_bound_to_requirements_unsatisfied, - is_constraint_requirements_unsatisfied, - is_requires_requirements_unsatisfied, + cluster_mapping_has_issue_orm_version, object_configuration_has_issue, object_imports_has_issue, ) from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.concern.locks import get_lock_on_object -from cm.services.group_config import ConfigHostGroupRepo from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import ( send_config_creation_event, send_delete_service_event, - send_host_component_map_update_event, ) from cm.utils import obj_ref @@ -148,7 +140,7 @@ def add_cluster(prototype: Prototype, name: str, description: str = "") -> Clust ) if recalculate_own_concerns_on_add_clusters(cluster): # TODO: redistribute only new issues. 
See ADCM-5798 - redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((cluster.pk,)))) + redistribute_issues_and_flags(topology=retrieve_cluster_topology(cluster.pk)) reset_hc_map() @@ -268,7 +260,7 @@ def delete_service(service: ClusterObject) -> None: cluster = service.cluster cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) - if not cluster_mapping_has_issue(cluster=cluster): + if not cluster_mapping_has_issue_orm_version(cluster=cluster): delete_issue( owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT ) @@ -352,7 +344,7 @@ def remove_host_from_cluster(host: Host) -> Host: # if there's no lock on cluster, nothing should be removed remove_concern_from_object(object_=host, concern=get_lock_on_object(object_=cluster)) - if not cluster_mapping_has_issue(cluster): + if not cluster_mapping_has_issue_orm_version(cluster): delete_issue( owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT ) @@ -395,7 +387,7 @@ def add_service_to_cluster(cluster: Cluster, proto: Prototype) -> ClusterObject: add_components_to_service(cluster=cluster, service=service) recalculate_own_concerns_on_add_services(cluster=cluster, services=(service,)) - redistribute_issues_and_flags(next(retrieve_clusters_topology((cluster.id,)))) + redistribute_issues_and_flags(retrieve_cluster_topology(cluster.id)) re_apply_object_policy(apply_object=cluster) @@ -564,146 +556,6 @@ def make_host_comp_list(cluster: Cluster, hc_in: list[dict]) -> list[tuple[Clust return host_comp_list -def check_hc(cluster: Cluster, hc_in: list[dict]) -> list[tuple[ClusterObject, Host, ServiceComponent]]: - check_sub_key(hc_in=hc_in) - host_comp_list = make_host_comp_list(cluster=cluster, hc_in=hc_in) - - requirements_data = extract_data_for_requirements_check(cluster=cluster, input_mapping=hc_in) - - requires_not_ok, error_message = is_requires_requirements_unsatisfied( - 
topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - existing_objects_map=requirements_data.existing_objects_map, - existing_objects_by_type=requirements_data.objects_map_by_type, - ) - if requires_not_ok and error_message is not None: - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) - - bound_not_ok, error_message = is_bound_to_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - existing_objects_map=requirements_data.existing_objects_map, - ) - if bound_not_ok and error_message: - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) - - constraint_not_ok, error_message = is_constraint_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - components_map=requirements_data.objects_map_by_type["component"], - ) - if constraint_not_ok and error_message is not None: - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) - - check_maintenance_mode(cluster=cluster, host_comp_list=host_comp_list) - - return host_comp_list - - -def check_maintenance_mode( - cluster: Cluster, host_comp_list: list[tuple[ClusterObject, Host, ServiceComponent]] -) -> None: - for service, host, comp in host_comp_list: - try: - HostComponent.objects.get(cluster=cluster, service=service, host=host, component=comp) - except HostComponent.DoesNotExist: - if host.maintenance_mode == MaintenanceMode.ON: - raise_adcm_ex("INVALID_HC_HOST_IN_MM") - - -def still_existed_hc(cluster: Cluster, host_comp_list: list[tuple[ClusterObject, Host, ServiceComponent]]) -> list: - result = [] - for service, host, comp in host_comp_list: - try: - existed_hc 
= HostComponent.objects.get(cluster=cluster, service=service, host=host, component=comp) - result.append(existed_hc) - except HostComponent.DoesNotExist: - continue - - return result - - -def save_hc( - cluster: Cluster, host_comp_list: list[tuple[ClusterObject, Host, ServiceComponent]] -) -> list[HostComponent]: - hc_queryset = HostComponent.objects.filter(cluster=cluster).order_by("id") - service_set = {hc.service for hc in hc_queryset.select_related("service")} - old_hosts = {i.host for i in hc_queryset.select_related("host")} - new_hosts = {i[1] for i in host_comp_list} - - previous_topology = next(retrieve_clusters_topology(cluster_ids=(cluster.id,))) - - lock = get_lock_on_object(object_=cluster) - if lock: - for removed_host in old_hosts.difference(new_hosts): - remove_concern_from_object(object_=removed_host, concern=lock) - - for added_host in new_hosts.difference(old_hosts): - add_concern_to_object(object_=added_host, concern=lock) - - hc_queryset.delete() - host_component_list = [] - - for service, host, comp in host_comp_list: - host_component = HostComponent( - cluster=cluster, - service=service, - host=host, - component=comp, - ) - host_component.save() - host_component_list.append(host_component) - - updated_topology = next(retrieve_clusters_topology(cluster_ids=(cluster.id,))) - unmapped_hosts = find_hosts_difference(new_topology=updated_topology, old_topology=previous_topology).unmapped - ActionHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) - ConfigHostGroupRepo().remove_unmapped_hosts_from_groups(unmapped_hosts) - - # HC may break - # We can't be sure this method is called after some sort of "check" - cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) - if not cluster_mapping_has_issue(cluster=cluster): - delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) - elif retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) is None: - create_issue(owner=cluster_cod, 
cause=ConcernCause.HOSTCOMPONENT) - - redistribute_issues_and_flags(topology=updated_topology) - - reset_hc_map() - reset_objects_in_mm() - - for host_component_item in host_component_list: - service_set.add(host_component_item.service) - - if service_set: - service_list = list(service_set) - service_content_type = ContentType.objects.get_for_model(model=service_list[0]) - for service in service_list: - for policy in Policy.objects.filter( - object__object_id=service.pk, object__content_type=service_content_type - ): - policy.apply() - - for policy in Policy.objects.filter( - object__object_id=service_list[0].cluster.pk, - object__content_type=ContentType.objects.get_for_model(model=service_list[0].cluster), - ): - policy.apply() - - send_host_component_map_update_event(cluster=cluster) - return host_component_list - - -def add_hc(cluster: Cluster, hc_in: list[dict]) -> list[HostComponent]: - host_comp_list = check_hc(cluster=cluster, hc_in=hc_in) - - with atomic(): - return save_hc(cluster=cluster, host_comp_list=host_comp_list) - - def get_bind( cluster: Cluster, service: ClusterObject | None, source_cluster: Cluster, source_service: ClusterObject | None ): diff --git a/python/cm/issue.py b/python/cm/issue.py index 87466b4e82..b295799bd8 100755 --- a/python/cm/issue.py +++ b/python/cm/issue.py @@ -14,7 +14,7 @@ from typing import Iterable from api_v2.concern.serializers import ConcernSerializer -from core.types import CoreObjectDescriptor, PrototypeID +from core.types import CoreObjectDescriptor from django.conf import settings from django.db.transaction import on_commit from djangorestframework_camel_case.util import camelize @@ -26,7 +26,6 @@ from cm.logger import logger from cm.models import ( ADCMEntity, - Bundle, Cluster, ClusterObject, ConcernCause, @@ -40,9 +39,7 @@ ) from cm.services.concern import create_issue, retrieve_issue from cm.services.concern.checks import ( - cluster_mapping_has_issue, - extract_data_for_requirements_check, - 
is_constraint_requirements_unsatisfied, + cluster_mapping_has_issue_orm_version, object_configuration_has_issue, object_has_required_services_issue, object_imports_has_issue, @@ -75,53 +72,11 @@ def check_service_requires(cluster: Cluster, proto: Prototype) -> None: ) -def check_component_constraint( - cluster: Cluster, service_prototype: Prototype, hc_in: list, old_bundle: Bundle | None = None -) -> None: - target_prototypes: set[PrototypeID] = set() - - for component_prototype in Prototype.objects.filter(parent=service_prototype, type="component"): - if old_bundle: - try: - old_service_proto = Prototype.objects.get( - name=service_prototype.name, - type="service", - bundle=old_bundle, - ) - Prototype.objects.get( - parent=old_service_proto, - bundle=old_bundle, - type="component", - name=component_prototype.name, - ) - except Prototype.DoesNotExist: - continue - - target_prototypes.add(component_prototype.pk) - - requirements_data = extract_data_for_requirements_check( - cluster=cluster, - input_mapping=[ - {"host_id": host.id, "component_id": component.id, "service_id": service.id} - for service, host, component in hc_in - ], - target_component_prototypes=target_prototypes, - ) - constraint_not_ok, error_message = is_constraint_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - components_map=requirements_data.objects_map_by_type["component"], - ) - if constraint_not_ok and error_message is not None: - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) - - _issue_check_map = { ConcernCause.CONFIG: object_configuration_has_issue, ConcernCause.IMPORT: object_imports_has_issue, ConcernCause.SERVICE: object_has_required_services_issue, - ConcernCause.HOSTCOMPONENT: cluster_mapping_has_issue, + ConcernCause.HOSTCOMPONENT: cluster_mapping_has_issue_orm_version, ConcernCause.REQUIREMENT: 
service_requirements_has_issue, } _prototype_issue_map = { diff --git a/python/cm/services/cluster.py b/python/cm/services/cluster.py index d8333a5b4a..74971d21dd 100644 --- a/python/cm/services/cluster.py +++ b/python/cm/services/cluster.py @@ -101,7 +101,7 @@ def perform_host_to_cluster_map( # this import should be resolved later: # concerns management should be passed in here the same way as `status_service`, # because it's a dependency that shouldn't be directly set - from cm.services.concern.checks import cluster_mapping_has_issue + from cm.services.concern.checks import cluster_mapping_has_issue_orm_version from cm.services.concern.distribution import distribute_concern_on_related_objects with atomic(): @@ -109,7 +109,7 @@ def perform_host_to_cluster_map( cluster = Cluster.objects.get(id=cluster_id) cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) - if not cluster_mapping_has_issue(cluster=cluster): + if not cluster_mapping_has_issue_orm_version(cluster=cluster): delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) elif not cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT): concern = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) @@ -122,7 +122,11 @@ def perform_host_to_cluster_map( return hosts -def retrieve_clusters_topology( +def retrieve_cluster_topology(cluster_id: ClusterID) -> ClusterTopology: + return next(retrieve_multiple_clusters_topology(cluster_ids=(cluster_id,))) + + +def retrieve_multiple_clusters_topology( cluster_ids: Iterable[ClusterID], input_mapping: dict[ClusterID, list[MappingDict]] | None = None ) -> Generator[ClusterTopology, None, None]: return build_clusters_topology(cluster_ids=cluster_ids, db=ClusterDB, input_mapping=input_mapping) @@ -137,7 +141,7 @@ def retrieve_related_cluster_topology(orm_object: Cluster | ClusterObject | Serv message = f"Can't detect cluster variables for {orm_object}" raise RuntimeError(message) - return 
next(retrieve_clusters_topology([cluster_id])) + return next(retrieve_multiple_clusters_topology([cluster_id])) def retrieve_clusters_objects_maintenance_mode(cluster_ids: Iterable[ClusterID]) -> MaintenanceModeOfObjects: diff --git a/python/cm/services/concern/_operaitons.py b/python/cm/services/concern/_operaitons.py index a760e310e9..bfddb0cb1b 100644 --- a/python/cm/services/concern/_operaitons.py +++ b/python/cm/services/concern/_operaitons.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from itertools import chain from typing import Iterable from core.types import ADCMCoreType, CoreObjectDescriptor, ObjectID @@ -71,7 +72,7 @@ def create_issue(owner: CoreObjectDescriptor, cause: ConcernCause) -> ConcernIte def _get_target_and_placeholder_types( concern_message: ConcernMessage, owner: CoreObjectDescriptor ) -> tuple[Prototype | None, PlaceholderTypeDTO]: - owner_prototype = Prototype.objects.values("type", "bundle_id", "requires").get( + owner_prototype = Prototype.objects.values("id", "type", "bundle_id", "requires").get( pk=core_type_to_model(owner.type).objects.values_list("prototype_id", flat=True).get(pk=owner.id) ) target = None @@ -108,7 +109,14 @@ def _get_target_and_placeholder_types( cluster_id = ClusterObject.objects.values_list("cluster_id", flat=True).get(pk=owner.id) placeholder_type_dto = PlaceholderTypeDTO(source="cluster_services", target="prototype") - required_services_names = {require["service"] for require in owner_prototype["requires"]} + required_services_names = {require["service"] for require in owner_prototype["requires"]} | set( + chain.from_iterable( + (require["service"] for require in requires if "service" in require and "component" not in require) + for requires in Prototype.objects.filter(parent_id=owner_prototype["id"]).values_list( + "requires", flat=True + ) + ) + ) existing_required_services = set( ClusterObject.objects.values_list("prototype__name", 
flat=True).filter( cluster_id=cluster_id, prototype__name__in=required_services_names @@ -119,4 +127,8 @@ def _get_target_and_placeholder_types( name__in=absent_services_names, type=ObjectType.SERVICE, bundle_id=owner_prototype["bundle_id"] ).first() + case _: + message = f"Can't detect target and placeholder for {concern_message}" + raise RuntimeError(message) + return target, placeholder_type_dto diff --git a/python/cm/services/concern/cases.py b/python/cm/services/concern/cases.py index ebba24f822..d25e36cf6f 100644 --- a/python/cm/services/concern/cases.py +++ b/python/cm/services/concern/cases.py @@ -20,7 +20,7 @@ from cm.models import Cluster, ClusterObject, ConcernCause, ConcernItem, ConcernType, Host, ServiceComponent from cm.services.concern import create_issue, delete_issue, retrieve_issue from cm.services.concern.checks import ( - cluster_mapping_has_issue, + cluster_mapping_has_issue_orm_version, object_configuration_has_issue, object_has_required_services_issue, object_imports_has_issue, @@ -35,7 +35,7 @@ def recalculate_own_concerns_on_add_clusters(cluster: Cluster) -> OwnObjectConce cluster_checks = ( (ConcernCause.CONFIG, object_configuration_has_issue), (ConcernCause.IMPORT, object_imports_has_issue), - (ConcernCause.HOSTCOMPONENT, cluster_mapping_has_issue), + (ConcernCause.HOSTCOMPONENT, cluster_mapping_has_issue_orm_version), (ConcernCause.SERVICE, object_has_required_services_issue), ) @@ -56,7 +56,7 @@ def recalculate_own_concerns_on_add_services( # create new concerns cluster_own_hc_issue = retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) - if cluster_own_hc_issue is None and cluster_mapping_has_issue(cluster=cluster): + if cluster_own_hc_issue is None and cluster_mapping_has_issue_orm_version(cluster=cluster): issue = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) new_concerns[ADCMCoreType.CLUSTER][cluster.pk].add(issue.pk) @@ -104,7 +104,7 @@ def recalculate_concerns_on_cluster_upgrade(cluster: Cluster) 
-> None: cluster_checks = ( (ConcernCause.CONFIG, object_configuration_has_issue), (ConcernCause.IMPORT, object_imports_has_issue), - (ConcernCause.HOSTCOMPONENT, cluster_mapping_has_issue), + (ConcernCause.HOSTCOMPONENT, cluster_mapping_has_issue_orm_version), (ConcernCause.SERVICE, object_has_required_services_issue), ) diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index be7ba3b6ed..958a650dd0 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -10,16 +10,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import defaultdict, deque -from functools import partial +from collections import deque from operator import attrgetter -from typing import Callable, Iterable, Literal, NamedTuple, TypeAlias +from typing import Iterable, Literal, NamedTuple, TypeAlias -from core.cluster.types import ClusterTopology, ServiceTopology -from core.types import ClusterID, ComponentID, ConfigID, HostID, MappingDict, ObjectID, PrototypeID, ServiceID +from core.cluster.types import ClusterTopology +from core.concern.checks import find_cluster_mapping_issues, find_unsatisfied_service_requirements +from core.concern.types import ComponentRestrictionOwner, ServiceDependencies, ServiceRestrictionOwner +from core.converters import named_mapping_from_topology +from core.types import ClusterID, ConfigID, ObjectID from django.db.models import Q -from typing_extensions import Self +from cm.errors import AdcmEx from cm.models import ( Cluster, ClusterBind, @@ -27,19 +29,22 @@ Host, HostProvider, ObjectConfig, - ObjectType, Prototype, PrototypeImport, ServiceComponent, ) -from cm.services.cluster import retrieve_clusters_topology +from cm.services.cluster import retrieve_cluster_topology +from cm.services.concern.repo import ( + BundleRestrictions, + MappingRestrictions, + retrieve_bundle_restrictions, +) from cm.services.config import 
retrieve_config_attr_pairs from cm.services.config.spec import FlatSpec, retrieve_flat_spec_for_objects ObjectWithConfig: TypeAlias = Cluster | ClusterObject | ServiceComponent | HostProvider | Host HasIssue: TypeAlias = bool RequiresEntry: TypeAlias = dict[Literal["service", "component"], str] -ConstraintDBFormat: TypeAlias = tuple[str] | tuple[int | str, int | str] class MissingRequirement(NamedTuple): @@ -47,124 +52,6 @@ class MissingRequirement(NamedTuple): name: str -class Constraint(NamedTuple): - internal: ConstraintDBFormat - checks: tuple[Callable[[int, int], bool], ...] - - @classmethod - def from_db_repr(cls, constraint: ConstraintDBFormat) -> Self: - match constraint: - case [0, "+"]: - # no checks actually required, it's the "default" - checks = () - case ["+"]: - checks = (check_on_all,) - case ["odd"]: - checks = (check_is_odd,) - case [int(exact)]: - checks = (partial(check_exact, argument=exact),) - case [int(min_), "odd"]: - checks = (partial(check_equal_or_greater, argument=min_), partial(check_is_odd, allow_zero=min_ == 0)) - case [int(min_), "+"]: - checks = (partial(check_equal_or_greater, argument=min_),) - case [int(min_), int(max_)]: - checks = (partial(check_equal_or_greater, argument=min_), partial(check_equal_or_less, argument=max_)) - case _: - checks = () - - return Constraint(internal=constraint, checks=checks) - - def is_met_for(self, mapped_hosts: int, hosts_in_cluster: int) -> bool: - return all(check(mapped_hosts, hosts_in_cluster) for check in self.checks) - - -class ServiceExternalRequirement(NamedTuple): - name: str - - def __str__(self): - return f'service "{self.name}"' - - -class ComponentExternalRequirement(NamedTuple): - name: str - service_name: str - - def __str__(self): - return f'component "{self.name}" of service "{self.service_name}"' - - -class ServiceRequirements(NamedTuple): - requires: tuple[ServiceExternalRequirement | ComponentExternalRequirement, ...] 
- - @property - def is_requires_check_required(self) -> bool: - return len(self.requires) > 0 - - -class ComponentMappingRequirements(NamedTuple): - constraint: Constraint - requires: tuple[ServiceExternalRequirement | ComponentExternalRequirement, ...] - bound_to: ComponentExternalRequirement | None - - @property - def is_constraint_check_required(self) -> bool: - return len(self.constraint.checks) > 0 - - @property - def is_requires_check_required(self) -> bool: - return len(self.requires) > 0 - - @property - def is_bound_to_check_required(self) -> bool: - return self.bound_to is not None - - -class RequirementsCheckDTO(NamedTuple): - topology: ClusterTopology - component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]] - prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements] - existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID] - - @property - def prototype_requirements_only_component_requires( - self, - ) -> dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements]: - res = {} - - for prototype_id, requirements in self.prototype_requirements.items(): - new_requires = tuple(req for req in requirements.requires if isinstance(req, ComponentExternalRequirement)) - - if isinstance(requirements, ComponentMappingRequirements): - new_requirements = ComponentMappingRequirements( - constraint=requirements.constraint, requires=new_requires, bound_to=requirements.bound_to - ) - elif isinstance(requirements, ServiceRequirements): - new_requirements = ServiceRequirements(requires=new_requires) - else: - raise NotImplementedError(f"Unexpected requirements type: {type(requirements)}") - - res[prototype_id] = new_requirements - - return res - - @property - def objects_map_by_type( - self, - ) -> dict[ - Literal["service", "component"], - dict[ServiceID | ComponentID, ServiceExternalRequirement | ComponentExternalRequirement], - ]: - 
existing_objects = defaultdict(dict) - - for entity_reqs, entity_id in self.existing_objects_map.items(): - if isinstance(entity_reqs, ComponentExternalRequirement): - existing_objects["component"][entity_id] = entity_reqs - elif isinstance(entity_reqs, ServiceExternalRequirement): - existing_objects["service"][entity_id] = entity_reqs - - return existing_objects - - def object_configuration_has_issue(target: ObjectWithConfig) -> HasIssue: config_spec = next(iter(retrieve_flat_spec_for_objects(prototypes=(target.prototype_id,)).values()), None) if not config_spec: @@ -243,46 +130,39 @@ def filter_objects_with_configuration_issues(config_spec: FlatSpec, *objects: Ob def service_requirements_has_issue(service: ClusterObject) -> HasIssue: - return bool(find_unsatisfied_requirements(cluster_id=service.cluster_id, requires=service.prototype.requires)) - - -def find_unsatisfied_requirements( - cluster_id: ClusterID, requires: list[RequiresEntry] -) -> tuple[MissingRequirement, ...]: - if not requires: - return () - - names_of_required_services: set[str] = set() - required_components: set[tuple[str, str]] = set() - - for requirement in requires: - service_name = requirement["service"] - - if component_name := requirement.get("component"): - required_components.add((service_name, component_name)) - else: - names_of_required_services.add(service_name) - - missing_requirements = deque() - - if names_of_required_services: - for missing_service_name in names_of_required_services.difference( - ClusterObject.objects.values_list("prototype__name", flat=True).filter(cluster_id=cluster_id) + bundle_restrictions = retrieve_bundle_restrictions(service.prototype.bundle_id) + service_name = service.prototype.name + service_related_restrictions = {} + for key, required_services in bundle_restrictions.service_requires.items(): + if (isinstance(key, ServiceRestrictionOwner) and key.name == service_name) or ( + isinstance(key, ComponentRestrictionOwner) and key.service == service_name ): - 
missing_requirements.append(MissingRequirement(type="service", name=missing_service_name)) + service_related_restrictions[key] = required_services - if required_components: - for _, missing_component_name in required_components.difference( - ServiceComponent.objects.values_list("service__prototype__name", "prototype__name").filter( - cluster_id=cluster_id - ) - ): - missing_requirements.append(MissingRequirement(type="component", name=missing_component_name)) + return bool( + find_unsatisfied_service_requirements( + services_restrictions=service_related_restrictions, + named_mapping=named_mapping_from_topology(retrieve_cluster_topology(service.cluster_id)), + ) + ) - return tuple(missing_requirements) + +def check_service_requirements( + services_restrictions: ServiceDependencies, + topology: ClusterTopology, +): + issues = find_unsatisfied_service_requirements( + services_restrictions=services_restrictions, named_mapping=named_mapping_from_topology(topology) + ) + if issues: + issue_to_show = issues[0] + raise AdcmEx( + code="SERVICE_CONFLICT", + msg=f'No required service "{issue_to_show.required_service}" for {issue_to_show.dependant_object}', + ) -def cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: +def cluster_mapping_has_issue_orm_version(cluster: Cluster) -> HasIssue: """ Checks: - requires (components only) @@ -290,270 +170,34 @@ def cluster_mapping_has_issue(cluster: Cluster) -> HasIssue: - bound_to """ - requirements_data = extract_data_for_requirements_check(cluster=cluster) + bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(cluster.prototype.bundle_id)) - bound_not_ok, _ = is_bound_to_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - existing_objects_map=requirements_data.existing_objects_map, - ) - if bound_not_ok: - return True - - requires_not_ok, _ = 
is_requires_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements_only_component_requires, - existing_objects_map=requirements_data.existing_objects_map, - existing_objects_by_type=requirements_data.objects_map_by_type, - ) - if requires_not_ok: - return True - - constraint_not_ok, _ = is_constraint_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - components_map=requirements_data.objects_map_by_type["component"], - ) - if constraint_not_ok: - return True + return cluster_mapping_has_issue(cluster_id=cluster.id, bundle_restrictions=bundle_restrictions) - return False +def cluster_mapping_has_issue(cluster_id: ClusterID, bundle_restrictions: BundleRestrictions) -> HasIssue: + topology = retrieve_cluster_topology(cluster_id=cluster_id) -def extract_data_for_requirements_check( - cluster: Cluster, - input_mapping: list[MappingDict] | None = None, - target_component_prototypes: set[PrototypeID] | None = None, -) -> RequirementsCheckDTO: - bundle_id = cluster.prototype.bundle_id - prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements] = {} - - query = {"bundle_id": bundle_id, "type__in": {ObjectType.COMPONENT, ObjectType.SERVICE}} - if target_component_prototypes is not None: - query.update({"pk__in": target_component_prototypes}) - - for prototype_id, prototype_type, constraint, requires, bound_to in Prototype.objects.values_list( - "id", "type", "constraint", "requires", "bound_to" - ).filter(**query): - prepared_requires = deque() - for requirement in requires: - service_name = requirement["service"] - if component_name := requirement.get("component"): - prepared_requires.append(ComponentExternalRequirement(name=component_name, 
service_name=service_name)) - else: - prepared_requires.append(ServiceExternalRequirement(name=service_name)) - - if prototype_type == ObjectType.COMPONENT: - prototype_requirements[prototype_id] = ComponentMappingRequirements( - constraint=Constraint.from_db_repr(constraint), - requires=tuple(prepared_requires), - bound_to=ComponentExternalRequirement(name=bound_to["component"], service_name=bound_to["service"]) - if bound_to - else None, - ) - elif prototype_type == ObjectType.SERVICE: - prototype_requirements[prototype_id] = ServiceRequirements(requires=tuple(prepared_requires)) - else: - raise NotImplementedError(f"Unexpected prototype type: {prototype_type}") - - # prepare data for check - - input_mapping = {cluster.id: input_mapping} if input_mapping else None - topology = next(retrieve_clusters_topology(cluster_ids=(cluster.id,), input_mapping=input_mapping)) - - query = {"cluster": cluster} - if target_component_prototypes is not None: - query.update({"servicecomponent__prototype_id__in": target_component_prototypes}) - - component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]] = {} - existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID] = { - ServiceExternalRequirement(name=service_name): service_id - for service_id, service_name in ClusterObject.objects.values_list("id", "prototype__name") - .filter(**query) - .distinct() - } - - query = {"id__in": topology.component_ids} - if target_component_prototypes is not None: - query.update({"prototype_id__in": target_component_prototypes}) - - for ( - component_id, - prototype_id, - service_id, - service_prototype_id, - component_name, - service_name, - ) in ServiceComponent.objects.values_list( - "id", "prototype_id", "service_id", "service__prototype_id", "prototype__name", "service__prototype__name" - ).filter(**query): - component_prototype_map[component_id] = (prototype_id, service_id, service_prototype_id) - 
existing_objects_map[ - ComponentExternalRequirement(name=component_name, service_name=service_name) - ] = component_id - - return RequirementsCheckDTO( - topology=topology, - component_prototype_map=component_prototype_map, - prototype_requirements=prototype_requirements, - existing_objects_map=existing_objects_map, + issues = find_cluster_mapping_issues( + restrictions=bundle_restrictions.mapping, + named_mapping=named_mapping_from_topology(topology), + amount_of_hosts_in_cluster=len(topology.hosts), ) - -def is_bound_to_requirements_unsatisfied( - topology: ClusterTopology, - component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]], - prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements], - existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID], -) -> tuple[bool, str | None]: - existing_components: dict[ComponentID, ComponentExternalRequirement] = {} - for entity_reqs, entity_id in existing_objects_map.items(): - if isinstance(entity_reqs, ComponentExternalRequirement): - existing_components[entity_id] = entity_reqs - - for component_id, (prototype_id, service_id, _) in component_prototype_map.items(): - requirements = prototype_requirements[prototype_id] - - # only mapped components should be checked for bound_to - if ( - not requirements.is_bound_to_check_required - or not topology.services[service_id].components[component_id].hosts - ): - continue - - bound_requester_reference = str(existing_components[component_id]) - error_message = f"{str(requirements.bound_to).capitalize()} not in hc for {bound_requester_reference}" - - bound_component_id = existing_objects_map.get(requirements.bound_to) - if not bound_component_id: - return True, error_message - - service_id_of_bound_component = existing_objects_map.get( - ServiceExternalRequirement(name=requirements.bound_to.service_name) - ) - if not service_id_of_bound_component: - return True, 
error_message - - bound_service_topology: ServiceTopology | None = topology.services.get(service_id_of_bound_component) - if not bound_service_topology: - return True, error_message - - error_message = f"No {str(requirements.bound_to).capitalize()} on host for {bound_requester_reference}" - - bound_component_hosts: set[HostID] = set(bound_service_topology.components[bound_component_id].hosts) - current_component_hosts: set[HostID] = set(topology.services[service_id].components[component_id].hosts) - - if bound_component_hosts != current_component_hosts: - return True, error_message - - return False, None + return len(issues) != 0 -def is_requires_requirements_unsatisfied( +def check_mapping_restrictions( + mapping_restrictions: MappingRestrictions, topology: ClusterTopology, - component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]], - prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements], - existing_objects_map: dict[ComponentExternalRequirement | ServiceExternalRequirement, ComponentID | ServiceID], - existing_objects_by_type: dict[ - Literal["service", "component"], - dict[ServiceID | ComponentID, ServiceExternalRequirement | ComponentExternalRequirement], - ], -) -> tuple[bool, str | None]: - seen_service_prototypes: set[PrototypeID] = set() - - for component_id, (prototype_id, service_id, service_prototype_id) in component_prototype_map.items(): - # only mapped components should be checked for requires - if not topology.services[service_id].components[component_id].hosts: - continue - - component_requirements = prototype_requirements[prototype_id] - service_requirements = None - if service_prototype_id not in seen_service_prototypes: - service_requirements = prototype_requirements[service_prototype_id] - seen_service_prototypes.add(service_prototype_id) - - component_requires = ( - component_requirements.requires if component_requirements.is_requires_check_required else [] - ) - 
service_requires = ( - service_requirements.requires - if service_requirements is not None and service_requirements.is_requires_check_required - else [] - ) - all_requires = [ - *zip(component_requires, [existing_objects_by_type["component"][component_id]] * len(component_requires)), - *zip(service_requires, [existing_objects_by_type["service"][service_id]] * len(service_requires)), - ] - for required_entity, owner in all_requires: - try: - if isinstance(required_entity, ComponentExternalRequirement): - required_component_id = existing_objects_map[required_entity] - required_service_id = existing_objects_map[ - ServiceExternalRequirement(name=required_entity.service_name) - ] - elif isinstance(required_entity, ServiceExternalRequirement): - required_component_id = None - required_service_id = existing_objects_map[required_entity] - else: - raise NotImplementedError(f"Unexpected required_entity type: {type(required_entity)}") - except KeyError: - return True, f"No required {required_entity} for {owner}" - - if required_component_id is None: - continue - - if not topology.services[required_service_id].components[required_component_id].hosts: - return True, f"No required {required_entity} for {owner}" - - return False, None - - -def is_constraint_requirements_unsatisfied( - topology: ClusterTopology, - component_prototype_map: dict[ComponentID, tuple[PrototypeID, ServiceID, PrototypeID]], - prototype_requirements: dict[PrototypeID, ComponentMappingRequirements | ServiceRequirements], - components_map: dict[ComponentID, ComponentExternalRequirement], -) -> tuple[bool, str | None]: - for component_id, (prototype_id, service_id, _) in component_prototype_map.items(): - requirements = prototype_requirements[prototype_id] - - if requirements.is_constraint_check_required and not requirements.constraint.is_met_for( - mapped_hosts=len(topology.services[service_id].components[component_id].hosts), - hosts_in_cluster=len(topology.hosts), - ): - return ( - True, - 
f"{str(components_map[component_id]).capitalize()} " - f"has unsatisfied constraint: {requirements.constraint.internal}", - ) - - return False, None - - -# constraint check functions - - -def check_equal_or_less(mapped_hosts: int, _: int, argument: int): - return mapped_hosts <= argument - - -def check_equal_or_greater(mapped_hosts: int, _: int, argument: int): - return mapped_hosts >= argument - - -def check_exact(mapped_hosts: int, _: int, argument: int): - return mapped_hosts == argument - - -def check_is_odd(mapped_hosts: int, _: int, allow_zero: bool = False): - if mapped_hosts == 0 and allow_zero: - return True - - return mapped_hosts % 2 == 1 - - -def check_on_all(mapped_hosts: int, hosts_in_cluster: int): - return mapped_hosts > 0 and mapped_hosts == hosts_in_cluster + *, + error_message_template: str = "{}", +) -> None: + issues = find_cluster_mapping_issues( + restrictions=mapping_restrictions, + named_mapping=named_mapping_from_topology(topology), + amount_of_hosts_in_cluster=len(topology.hosts), + ) + if issues: + issue_to_show = issues[0] + raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message_template.format(issue_to_show.message)) diff --git a/python/cm/services/concern/distribution.py b/python/cm/services/concern/distribution.py index 2906e780e4..5faf3d21d6 100644 --- a/python/cm/services/concern/distribution.py +++ b/python/cm/services/concern/distribution.py @@ -276,6 +276,28 @@ def _find_concern_distribution_targets(owner: CoreObjectDescriptor) -> ConcernRe return targets +# PUBLIC lock/unlock multiple objects + + +def lock_objects(targets: Iterable[CoreObjectDescriptor], lock_id: ConcernID) -> None: + grouped_by_type = defaultdict(set) + for target in targets: + grouped_by_type[target.type].add(target.id) + + _add_concern_links_to_objects_in_db(targets=grouped_by_type, concern_id=lock_id) + + +def unlock_objects(targets: Iterable[CoreObjectDescriptor], lock_id: ConcernID) -> None: + grouped_by_type = defaultdict(set) + for target in 
targets: + grouped_by_type[target.type].add(target.id) + + _remove_concern_links_from_objects_in_db(targets=grouped_by_type, concern_id=lock_id) + + +# PROTECTED generic-purpose methods + + def _add_concern_links_to_objects_in_db(targets: ConcernRelatedObjects, concern_id: ConcernID) -> None: for core_type, ids in targets.items(): orm_model = core_type_to_model(core_type) @@ -288,7 +310,13 @@ def _add_concern_links_to_objects_in_db(targets: ConcernRelatedObjects, concern_ ) -# PROTECTED generic-purpose methods +def _remove_concern_links_from_objects_in_db(targets: ConcernRelatedObjects, concern_id: ConcernID) -> None: + for core_type, ids in targets.items(): + orm_model = core_type_to_model(core_type) + id_field = f"{orm_model.__name__.lower()}_id" + m2m_model = orm_model.concerns.through + + m2m_model.objects.filter(concernitem_id=concern_id, **{f"{id_field}__in": ids}).delete() def _get_own_concerns_of_objects( diff --git a/python/cm/services/concern/locks.py b/python/cm/services/concern/locks.py index 742a1aacdb..80b5b73adc 100644 --- a/python/cm/services/concern/locks.py +++ b/python/cm/services/concern/locks.py @@ -10,9 +10,22 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from core.types import CoreObjectDescriptor +from cm.converters import core_type_to_model from cm.models import Cluster, ClusterObject, ConcernItem, ConcernType, Host, HostProvider, ServiceComponent def get_lock_on_object(object_: Cluster | ClusterObject | ServiceComponent | HostProvider | Host) -> ConcernItem | None: return object_.concerns.filter(type=ConcernType.LOCK).first() + + +def retrieve_lock_on_object(object_: CoreObjectDescriptor) -> ConcernItem | None: + object_model = core_type_to_model(core_type=object_.type) + id_field = f"{object_model.__name__.lower()}_id" + + related_locks_qs = object_model.concerns.through.objects.filter( + concernitem__type=ConcernType.LOCK, **{id_field: object_.id} + ).values_list("concernitem_id", flat=True) + + return ConcernItem.objects.filter(id__in=related_locks_qs).first() diff --git a/python/cm/services/concern/repo.py b/python/cm/services/concern/repo.py new file mode 100644 index 0000000000..d3e3b868ab --- /dev/null +++ b/python/cm/services/concern/repo.py @@ -0,0 +1,82 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import defaultdict, deque + +from core.concern.checks import parse_constraint +from core.concern.types import ( + BundleRestrictions, + ComponentNameKey, + ComponentRestrictionOwner, + MappingRestrictions, + ServiceDependencies, + ServiceRestrictionOwner, +) +from core.types import BundleID + +from cm.models import ObjectType, Prototype + + +def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: + mapping_restrictions = MappingRestrictions(constraints={}, required=defaultdict(deque), binds={}) + service_requires: ServiceDependencies = defaultdict(set) + + for component_name, service_name, constraint, requires, bound_to in ( + Prototype.objects.select_related("parent") + .values_list("name", "parent__name", "constraint", "requires", "bound_to") + .filter(bundle_id=bundle_id, type=ObjectType.COMPONENT) + ): + key = ComponentRestrictionOwner(service=service_name, component=component_name) + + for requirement in requires: + # Requires that have `component` specified aren't the same with only service specified + # (regardless of restriction source): + # - "service-only" require presence of service in cluster, + # so it's enough to "add" required service. + # - ones with `component` key adds restriction on mapping, + # because such component should be mapped on at least one host. 
+ required_service_name = requirement["service"] + if required_component_name := requirement.get("component"): + # "service" requirements aren't checked for mapping issue + mapping_restrictions.required[key].append( + ComponentNameKey(component=required_component_name, service=required_service_name) + ) + else: + service_requires[key].add(required_service_name) + + constraint = parse_constraint(constraint) + if constraint.checks: + mapping_restrictions.constraints[key] = constraint + + if bound_to: + mapping_restrictions.binds[key] = ComponentNameKey( + component=bound_to["component"], service=bound_to["service"] + ) + + for service_name, requires in Prototype.objects.values_list("name", "requires").filter( + bundle_id=bundle_id, type=ObjectType.SERVICE + ): + if not requires: + continue + + key = ServiceRestrictionOwner(name=service_name) + + for requirement in requires: + required_service_name = requirement["service"] + if component_name := requirement.get("component"): + mapping_restrictions.required[key].append( + ComponentNameKey(component=component_name, service=required_service_name) + ) + else: + service_requires[key].add(required_service_name) + + return BundleRestrictions(service_requires=service_requires, mapping=mapping_restrictions) diff --git a/python/cm/services/job/action.py b/python/cm/services/job/action.py index 7b161be548..8df947f60d 100644 --- a/python/cm/services/job/action.py +++ b/python/cm/services/job/action.py @@ -14,6 +14,8 @@ from functools import partial from typing import TypeAlias +from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference +from core.cluster.types import HostComponentEntry from core.job.dto import TaskPayloadDTO from core.types import ActionTargetDescriptor, CoreObjectDescriptor from django.conf import settings @@ -22,7 +24,7 @@ from cm.adcm_config.checks import check_attr from cm.adcm_config.config import check_config_spec, get_prototype_config, process_config_spec, process_file_type 
-from cm.api import get_hc, save_hc +from cm.api import get_hc from cm.converters import model_name_to_core_type, orm_object_to_action_target_type, orm_object_to_core_type from cm.errors import AdcmEx from cm.models import ( @@ -34,17 +36,23 @@ ConcernType, ConfigLog, Host, - HostComponent, HostProvider, JobStatus, ServiceComponent, TaskLog, ) +from cm.services.cluster import retrieve_cluster_topology +from cm.services.concern.repo import retrieve_bundle_restrictions from cm.services.config.spec import convert_to_flat_spec_from_proto_flat_spec -from cm.services.job.checks import check_constraints_for_upgrade, check_hostcomponentmap +from cm.services.job.checks import ( + HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, + check_hostcomponentmap, + check_mapping_restrictions, +) from cm.services.job.inventory._config import update_configuration_for_inventory_inplace from cm.services.job.prepare import prepare_task_for_action from cm.services.job.run import run_task +from cm.services.mapping import change_host_component_mapping, check_no_host_in_mm from cm.status_api import send_task_status_update_event from cm.variant import process_variant @@ -231,13 +239,26 @@ def _process_hostcomponent( # should be handled one level above raise AdcmEx(code="TASK_ERROR", msg="Only cluster objects can have action with hostcomponentmap") + # `check_hostcomponentmap` won't run checks in these conditions, because it's checking actions with `hc_acl`. + # But this code checks whether existing hostcomponent satisfies constraints from new bundle. 
if is_upgrade_action and not action.hostcomponentmap: - new_hc = [ - (entry.service, entry.host, entry.component) - for entry in HostComponent.objects.select_related("service", "component", "host").filter(cluster=cluster) - ] - - check_constraints_for_upgrade(cluster=cluster, upgrade=action.upgrade, host_comp_list=new_hc) + topology = retrieve_cluster_topology(cluster_id=cluster.id) + bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(action.upgrade.bundle_id)) + new_topology = create_topology_with_new_mapping( + topology=topology, + new_mapping=( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in new_hostcomponent + ), + ) + check_mapping_restrictions( + mapping_restrictions=bundle_restrictions.mapping, + topology=new_topology, + error_message_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, + ) + host_difference = find_hosts_difference(new_topology=new_topology, old_topology=topology) + check_no_host_in_mm(host_difference.mapped.all) + return None, [], {}, is_upgrade_action host_map, post_upgrade_hc, delta = check_hostcomponentmap(cluster=cluster, action=action, new_hc=new_hostcomponent) @@ -248,14 +269,21 @@ def _finish_task_preparation( task: TaskLog, owner: ObjectWithAction, cluster: Cluster | None, - host_map: list | None, + host_map: list[tuple[ClusterObject, Host, ServiceComponent]] | None, is_upgrade_action: bool, payload: ActionRunPayload, spec: dict, flat_spec: dict, ): if host_map or (is_upgrade_action and host_map is not None): - save_hc(cluster=cluster, host_comp_list=host_map) + change_host_component_mapping( + cluster_id=cluster.id, + bundle_id=cluster.prototype.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=host.id, component_id=component.id) for (_, host, component) in host_map + ), + skip_checks=True, + ) if payload.conf: new_conf = update_configuration_for_inventory_inplace( diff --git a/python/cm/services/job/checks.py b/python/cm/services/job/checks.py index 
facf4e06a1..74a3533051 100644 --- a/python/cm/services/job/checks.py +++ b/python/cm/services/job/checks.py @@ -12,22 +12,37 @@ import copy -from cm.api import check_hc, check_maintenance_mode, check_sub_key, get_hc, make_host_comp_list +from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference +from core.cluster.types import ClusterTopology, HostComponentEntry +from rest_framework.status import HTTP_409_CONFLICT + from cm.errors import AdcmEx -from cm.issue import check_component_constraint, check_service_requires -from cm.models import Action, Cluster, ClusterObject, ConcernType, Host, Prototype, ServiceComponent -from cm.services.concern.checks import ( - extract_data_for_requirements_check, - is_bound_to_requirements_unsatisfied, - is_requires_requirements_unsatisfied, +from cm.models import ( + Action, + Cluster, + ClusterObject, + ConcernType, + Host, + Prototype, + ServiceComponent, ) +from cm.services.cluster import retrieve_cluster_topology +from cm.services.concern.checks import check_mapping_restrictions +from cm.services.concern.repo import retrieve_bundle_restrictions from cm.services.job._utils import cook_delta, get_old_hc from cm.services.job.types import HcAclAction +from cm.services.mapping import check_no_host_in_mm + +HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE = ( + "Host-component map of upgraded cluster should satisfy constraints of new bundle. 
Now error is: {}" +) def check_hostcomponentmap( cluster: Cluster | None, action: Action, new_hc: list[dict] ) -> tuple[list[tuple[ClusterObject, Host, ServiceComponent]] | None, list, dict[str, dict]]: + from cm.api import check_sub_key, get_hc, make_host_comp_list + if not action.hostcomponentmap: return None, [], {} @@ -47,69 +62,50 @@ def check_hostcomponentmap( raise AdcmEx(code="ISSUE_INTEGRITY_ERROR", msg=f"object {host} has issues") post_upgrade_hc, clear_hc = _check_upgrade_hc(action=action, new_hc=new_hc) + check_sub_key(hc_in=clear_hc) old_hc = get_old_hc(saved_hostcomponent=get_hc(cluster=cluster)) + new_entries = tuple( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) for entry in clear_hc + ) + + # todo most likely this topology should be created somewhere above and passed in here as argument + topology = retrieve_cluster_topology(cluster_id=cluster.id) + _check_entries_are_related_to_topology(topology=topology, entries=new_entries) + new_topology = create_topology_with_new_mapping( + topology=topology, + new_mapping=( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) for entry in clear_hc + ), + ) + host_difference = find_hosts_difference(new_topology=new_topology, old_topology=topology) + check_no_host_in_mm(host_difference.mapped.all) + if not hasattr(action, "upgrade"): - prepared_hc_list = check_hc(cluster=cluster, hc_in=clear_hc) + bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(cluster.prototype.bundle_id)) + check_mapping_restrictions(mapping_restrictions=bundle_restrictions.mapping, topology=new_topology) + else: - check_sub_key(hc_in=clear_hc) - prepared_hc_list = make_host_comp_list(cluster=cluster, hc_in=clear_hc) - check_constraints_for_upgrade(cluster=cluster, upgrade=action.upgrade, host_comp_list=prepared_hc_list) + bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(action.upgrade.bundle_id)) + check_mapping_restrictions( + 
mapping_restrictions=bundle_restrictions.mapping, + topology=new_topology, + error_message_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, + ) + + prepared_hc_list = make_host_comp_list(cluster=cluster, hc_in=clear_hc) delta = cook_delta(cluster=cluster, new_hc=prepared_hc_list, action_hc=action.hostcomponentmap, old=old_hc) return prepared_hc_list, post_upgrade_hc, delta -def check_constraints_for_upgrade(cluster, upgrade, host_comp_list): - try: - for service in ClusterObject.objects.filter(cluster=cluster): - try: - prototype = Prototype.objects.get(name=service.name, type="service", bundle=upgrade.bundle) - check_component_constraint( - cluster=cluster, - service_prototype=prototype, - hc_in=[i for i in host_comp_list if i[0] == service], - old_bundle=cluster.prototype.bundle, - ) - check_service_requires(cluster=cluster, proto=prototype) - except Prototype.DoesNotExist: - pass - - requirements_data = extract_data_for_requirements_check( - cluster=cluster, - input_mapping=[ - {"host_id": host.id, "component_id": component.id, "service_id": service.id} - for service, host, component in host_comp_list - ], - ) - requires_not_ok, error_message = is_requires_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - existing_objects_map=requirements_data.existing_objects_map, - existing_objects_by_type=requirements_data.objects_map_by_type, - ) - if requires_not_ok and error_message is not None: - raise AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_message) - - bound_not_ok, error_msg = is_bound_to_requirements_unsatisfied( - topology=requirements_data.topology, - component_prototype_map=requirements_data.component_prototype_map, - prototype_requirements=requirements_data.prototype_requirements, - existing_objects_map=requirements_data.existing_objects_map, - ) - if bound_not_ok: - raise 
AdcmEx(code="COMPONENT_CONSTRAINT_ERROR", msg=error_msg) - check_maintenance_mode(cluster=cluster, host_comp_list=host_comp_list) - except AdcmEx as e: - if e.code == "COMPONENT_CONSTRAINT_ERROR": - e.msg = ( - f"Host-component map of upgraded cluster should satisfy " - f"constraints of new bundle. Now error is: {e.msg}" - ) +def _check_entries_are_related_to_topology(topology: ClusterTopology, entries: tuple[HostComponentEntry, ...]) -> None: + if not {entry.host_id for entry in entries}.issubset(topology.hosts): + raise AdcmEx(code="FOREIGN_HOST", http_code=HTTP_409_CONFLICT) - raise AdcmEx(code=e.code, msg=e.msg) from e + if not {entry.component_id for entry in entries}.issubset(topology.component_ids): + raise AdcmEx(code="COMPONENT_NOT_FOUND", http_code=HTTP_409_CONFLICT) def _check_upgrade_hc(action, new_hc): diff --git a/python/cm/services/job/inventory/_base.py b/python/cm/services/job/inventory/_base.py index a66588a8df..a82c02a36f 100644 --- a/python/cm/services/job/inventory/_base.py +++ b/python/cm/services/job/inventory/_base.py @@ -40,7 +40,10 @@ Prototype, ServiceComponent, ) -from cm.services.cluster import retrieve_clusters_objects_maintenance_mode, retrieve_clusters_topology +from cm.services.cluster import ( + retrieve_cluster_topology, + retrieve_clusters_objects_maintenance_mode, +) from cm.services.group_config import GroupConfigName, retrieve_group_configs_for_hosts from cm.services.job.inventory._before_upgrade import extract_objects_before_upgrade, get_before_upgrades from cm.services.job.inventory._config import ( @@ -158,7 +161,7 @@ def _get_inventory_for_action_from_cluster_bundle( if target_hosts: host_groups["target"] = set(target_hosts) - cluster_topology = next(retrieve_clusters_topology([cluster_id])) + cluster_topology = retrieve_cluster_topology(cluster_id) hosts_in_maintenance_mode: set[int] = set( Host.objects.filter(maintenance_mode=MaintenanceMode.ON).values_list("id", flat=True) diff --git 
a/python/cm/services/job/run/_target_factories.py b/python/cm/services/job/run/_target_factories.py index 800dea3bdd..51d0834e0c 100644 --- a/python/cm/services/job/run/_target_factories.py +++ b/python/cm/services/job/run/_target_factories.py @@ -17,6 +17,7 @@ import json from ansible_plugin.utils import finish_check +from core.cluster.types import HostComponentEntry from core.job.executors import BundleExecutorConfig, ExecutorConfig from core.job.runners import ExecutionTarget, ExternalSettings from core.job.types import Job, ScriptType, Task @@ -25,7 +26,7 @@ from django.db.transaction import atomic from rbac.roles import re_apply_policy_for_jobs -from cm.api import get_hc, save_hc +from cm.api import get_hc from cm.models import ( AnsibleConfig, Cluster, @@ -54,6 +55,7 @@ JobEnv, ServiceActionType, ) +from cm.services.mapping import change_host_component_mapping from cm.status_api import send_prototype_and_state_update_event from cm.utils import deep_merge @@ -178,7 +180,7 @@ def _switch_hc_if_required(task: TaskLog): task.hostcomponentmap = old_hc task.post_upgrade_hc_map = None - task.save() + task.save(update_fields=["hostcomponentmap", "post_upgrade_hc_map"]) for hostcomponent in new_hc: if "component_prototype_id" in hostcomponent: @@ -189,7 +191,14 @@ def _switch_hc_if_required(task: TaskLog): host_map, *_ = check_hostcomponentmap(cluster, task.action, new_hc) if host_map is not None: - save_hc(cluster, host_map) + change_host_component_mapping( + cluster_id=cluster.id, + bundle_id=cluster.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=host.id, component_id=component.id) for (_, host, component) in host_map + ), + skip_checks=True, + ) # ENVIRONMENT BUILDERS diff --git a/python/cm/services/job/run/_task_finalizers.py b/python/cm/services/job/run/_task_finalizers.py index e927d2545a..6c7170cbe5 100644 --- a/python/cm/services/job/run/_task_finalizers.py +++ b/python/cm/services/job/run/_task_finalizers.py @@ -11,27 +11,24 @@ # limitations under 
the License. from logging import Logger -from operator import itemgetter from typing import Protocol +from core.cluster.types import HostComponentEntry from core.job.types import Task from core.types import ADCMCoreType, CoreObjectDescriptor from django.conf import settings -from cm.api import save_hc from cm.converters import core_type_to_model from cm.issue import unlock_affected_objects, update_hierarchy_issues from cm.models import ( ActionHostGroup, - ClusterObject, - Host, JobLog, MaintenanceMode, - ServiceComponent, TaskLog, get_object_cluster, ) from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, build_concern_reason +from cm.services.mapping import change_host_component_mapping from cm.status_api import send_object_update_event # todo "unwrap" these functions to use repo without directly calling ORM, @@ -67,27 +64,17 @@ def set_hostcomponent(task: Task, logger: Logger): return - new_hostcomponent = task.hostcomponent.saved - hosts = { - entry.pk: entry for entry in Host.objects.filter(id__in=set(map(itemgetter("host_id"), new_hostcomponent))) - } - services = { - entry.pk: entry - for entry in ClusterObject.objects.filter(id__in=set(map(itemgetter("service_id"), new_hostcomponent))) - } - components = { - entry.pk: entry - for entry in ServiceComponent.objects.filter(id__in=set(map(itemgetter("component_id"), new_hostcomponent))) - } - - host_comp_list = [ - (services[entry["service_id"]], hosts[entry["host_id"]], components[entry["component_id"]]) - for entry in new_hostcomponent - ] - logger.warning("task #%s is failed, restore old hc", task.id) - save_hc(cluster, host_comp_list) + change_host_component_mapping( + cluster_id=cluster.id, + bundle_id=cluster.prototype.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in task.hostcomponent.saved + ), + skip_checks=True, + ) def remove_task_lock(task_id: int) -> None: diff --git 
a/python/cm/services/mapping.py b/python/cm/services/mapping.py new file mode 100644 index 0000000000..57703aa0d8 --- /dev/null +++ b/python/cm/services/mapping.py @@ -0,0 +1,177 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Iterable + +from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference +from core.cluster.types import ClusterTopology, HostComponentEntry +from core.types import ADCMCoreType, BundleID, ClusterID, CoreObjectDescriptor, HostID +from django.contrib.contenttypes.models import ContentType +from django.db.transaction import atomic +from rbac.models import Policy +from rest_framework.status import HTTP_409_CONFLICT + +from cm.errors import AdcmEx +from cm.models import Cluster, ClusterObject, ConcernCause, Host, HostComponent, MaintenanceMode +from cm.services.action_host_group import ActionHostGroupRepo +from cm.services.cluster import retrieve_cluster_topology +from cm.services.concern import create_issue, delete_issue, retrieve_issue +from cm.services.concern.checks import ( + check_mapping_restrictions, + check_service_requirements, + cluster_mapping_has_issue, +) +from cm.services.concern.distribution import lock_objects, redistribute_issues_and_flags, unlock_objects +from cm.services.concern.locks import retrieve_lock_on_object +from cm.services.concern.repo import BundleRestrictions, retrieve_bundle_restrictions +from cm.services.group_config import ConfigHostGroupRepo +from 
cm.services.status.notify import reset_hc_map, reset_objects_in_mm +from cm.status_api import send_host_component_map_update_event + + +def change_host_component_mapping( + cluster_id: ClusterID, bundle_id: BundleID, flat_mapping: Iterable[HostComponentEntry], *, skip_checks: bool = False +) -> ClusterTopology: + # force remove duplicates + new_mapping_entries = set(flat_mapping) + + with atomic(): + # prepare + current_topology = retrieve_cluster_topology(cluster_id=cluster_id) + new_topology = _construct_new_topology_or_raise_on_invalid_input( + base_topology=current_topology, new_entries=new_mapping_entries + ) + host_difference = find_hosts_difference(new_topology=new_topology, old_topology=current_topology) + bundle_restrictions = retrieve_bundle_restrictions(bundle_id=bundle_id) + + # business checks + + # sometimes it's required to skip checks (e.g. in plugin calls) + if not skip_checks: + check_service_requirements( + services_restrictions=bundle_restrictions.service_requires, topology=new_topology + ) + check_mapping_restrictions(mapping_restrictions=bundle_restrictions.mapping, topology=new_topology) + check_no_host_in_mm(host_difference.mapped.all) + + # save + _recreate_mapping_in_db(topology=new_topology) + + # updates of related entities + _update_concerns( + old_topology=current_topology, new_topology=new_topology, bundle_restrictions=bundle_restrictions + ) + ActionHostGroupRepo().remove_unmapped_hosts_from_groups(host_difference.unmapped) + ConfigHostGroupRepo().remove_unmapped_hosts_from_groups(host_difference.unmapped) + _update_policies(topology=new_topology) + + # update info in statistics service + reset_hc_map() + reset_objects_in_mm() + send_host_component_map_update_event(cluster_id=cluster_id) + + return new_topology + + +def check_no_host_in_mm(hosts: Iterable[HostID]) -> None: + if Host.objects.filter(id__in=hosts).exclude(maintenance_mode=MaintenanceMode.OFF).exists(): + raise AdcmEx("INVALID_HC_HOST_IN_MM") + + +def 
_construct_new_topology_or_raise_on_invalid_input( + base_topology: ClusterTopology, new_entries: set[HostComponentEntry] +) -> ClusterTopology: + cluster_id = base_topology.cluster_id + + unrelated_components = {entry.component_id for entry in new_entries}.difference(base_topology.component_ids) + if unrelated_components: + cluster_name = Cluster.objects.values_list("name", flat=True).get(id=cluster_id) + ids_repr = ", ".join(f'"{component_id}"' for component_id in unrelated_components) + raise AdcmEx( + code="COMPONENT_NOT_FOUND", + http_code=HTTP_409_CONFLICT, + msg=f'Component(s) {ids_repr} do not belong to cluster "{cluster_name}"', + ) from None + + unbound_hosts = {entry.host_id for entry in new_entries}.difference(base_topology.hosts) + if unbound_hosts: + cluster_name = Cluster.objects.values_list("name", flat=True).get(id=cluster_id) + ids_repr = ", ".join(f'"{host_id}"' for host_id in sorted(unbound_hosts)) + raise AdcmEx( + code="HOST_NOT_FOUND", + http_code=HTTP_409_CONFLICT, + msg=f'Host(s) {ids_repr} do not belong to cluster "{cluster_name}"', + ) + + return create_topology_with_new_mapping(topology=base_topology, new_mapping=new_entries) + + +def _recreate_mapping_in_db(topology: ClusterTopology) -> None: + cluster_id = topology.cluster_id + HostComponent.objects.filter(cluster_id=cluster_id).delete() + HostComponent.objects.bulk_create( + ( + HostComponent(cluster_id=cluster_id, service_id=service_id, component_id=component_id, host_id=host_id) + for service_id, service in topology.services.items() + for component_id, component in service.components.items() + for host_id in component.hosts + ) + ) + + +def _update_concerns( + old_topology: ClusterTopology, new_topology: ClusterTopology, bundle_restrictions: BundleRestrictions +) -> None: + # todo HC may break (?) 
+ # We can't be sure this method is called after some sort of "check" + cluster = CoreObjectDescriptor(id=old_topology.cluster_id, type=ADCMCoreType.CLUSTER) + if not cluster_mapping_has_issue(cluster_id=cluster.id, bundle_restrictions=bundle_restrictions): + delete_issue(owner=cluster, cause=ConcernCause.HOSTCOMPONENT) + elif retrieve_issue(owner=cluster, cause=ConcernCause.HOSTCOMPONENT) is None: + create_issue(owner=cluster, cause=ConcernCause.HOSTCOMPONENT) + + redistribute_issues_and_flags(topology=new_topology) + + lock = retrieve_lock_on_object(object_=cluster) + if lock: + # Here we want to add locks on hosts that weren't mapped before, but are mapped now. + # And remove from those that aren't mapped to any component anymore. + unmapped_in_previous_topology = old_topology.unmapped_hosts + unmapped_in_new_topology = new_topology.unmapped_hosts + + unmapped = unmapped_in_new_topology - unmapped_in_previous_topology + if unmapped: + unlock_objects( + targets=(CoreObjectDescriptor(id=host_id, type=ADCMCoreType.HOST) for host_id in unmapped), + lock_id=lock.id, + ) + + mapped = unmapped_in_previous_topology - unmapped_in_new_topology + if mapped: + lock_objects( + targets=(CoreObjectDescriptor(id=host_id, type=ADCMCoreType.HOST) for host_id in mapped), + lock_id=lock.id, + ) + + +def _update_policies(topology: ClusterTopology) -> None: + service_content_type = ContentType.objects.get_for_model(model=ClusterObject) + for policy in Policy.objects.filter( + object__object_id__in=topology.services.keys(), object__content_type=service_content_type + ): + policy.apply() + + for policy in Policy.objects.filter( + object__object_id=topology.cluster_id, + object__content_type=ContentType.objects.get_for_model(model=Cluster), + ): + policy.apply() diff --git a/python/cm/services/status/notify.py b/python/cm/services/status/notify.py index b14ddd78d7..1f9bcdf62d 100644 --- a/python/cm/services/status/notify.py +++ b/python/cm/services/status/notify.py @@ -20,7 +20,7 @@ 
from cm.models import Cluster, ClusterObject, Host, HostComponent, ServiceComponent from cm.services.cluster import ( retrieve_clusters_objects_maintenance_mode, - retrieve_clusters_topology, + retrieve_multiple_clusters_topology, ) from cm.status_api import api_request @@ -74,7 +74,7 @@ def reset_objects_in_mm() -> Response | None: mm_info = retrieve_clusters_objects_maintenance_mode(cluster_ids=cluster_ids) - for topology in retrieve_clusters_topology(cluster_ids=cluster_ids): + for topology in retrieve_multiple_clusters_topology(cluster_ids=cluster_ids): cluster_objects_mm = calculate_maintenance_mode_for_cluster_objects( topology=topology, own_maintenance_mode=mm_info ) diff --git a/python/cm/status_api.py b/python/cm/status_api.py index 5e54ebb493..478219164b 100644 --- a/python/cm/status_api.py +++ b/python/cm/status_api.py @@ -15,7 +15,7 @@ from urllib.parse import urljoin import json -from core.types import CoreObjectDescriptor +from core.types import ClusterID, CoreObjectDescriptor from django.conf import settings from requests import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED @@ -110,8 +110,8 @@ def send_delete_service_event(service_id: int) -> Response | None: ) -def send_host_component_map_update_event(cluster: Cluster) -> None: - post_event(event=EventTypes.UPDATE_HOSTCOMPONENTMAP, object_id=cluster.pk) +def send_host_component_map_update_event(cluster_id: ClusterID) -> None: + post_event(event=EventTypes.UPDATE_HOSTCOMPONENTMAP, object_id=cluster_id) def send_config_creation_event(object_: ADCMEntity) -> None: diff --git a/python/cm/tests/test_action.py b/python/cm/tests/test_action.py index 8cc202ab51..c0549ede38 100644 --- a/python/cm/tests/test_action.py +++ b/python/cm/tests/test_action.py @@ -21,7 +21,7 @@ from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED, HTTP_409_CONFLICT -from cm.api import add_hc, add_service_to_cluster +from cm.api import 
add_service_to_cluster from cm.models import Action, MaintenanceMode, Prototype, ServiceComponent from cm.services.job.run._target_factories import prepare_ansible_environment from cm.services.job.run.repo import JobRepoImpl @@ -144,7 +144,7 @@ } -class ActionAllowTest(BaseTestCase): +class ActionAllowTest(BusinessLogicMixin, BaseTestCase): def setUp(self) -> None: super().setUp() self.test_files_dir = self.base_dir / "python" / "cm" / "tests" / "files" @@ -152,7 +152,7 @@ def setUp(self) -> None: _, self.cluster, _ = self.upload_bundle_create_cluster_config_log( bundle_path=Path(self.test_files_dir, "cluster_test_host_actions_mm.tar"), cluster_name="test-cluster-1" ) - service = add_service_to_cluster( + add_service_to_cluster( cluster=self.cluster, proto=Prototype.objects.get(name="service_1", display_name="Service 1", type="service"), ) @@ -169,13 +169,13 @@ def setUp(self) -> None: cluster=self.cluster, prototype__name="component_2", prototype__display_name="Component 2 from Service 1" ) - add_hc( + self.set_hostcomponent( cluster=self.cluster, - hc_in=[ - {"host_id": self.host_1.pk, "service_id": service.pk, "component_id": component_1.pk}, - {"host_id": self.host_2.pk, "service_id": service.pk, "component_id": component_1.pk}, - {"host_id": self.host_2.pk, "service_id": service.pk, "component_id": component_2.pk}, - {"host_id": self.host_3.pk, "service_id": service.pk, "component_id": component_2.pk}, + entries=[ + (self.host_1, component_1), + (self.host_2, component_1), + (self.host_2, component_2), + (self.host_3, component_2), ], ) diff --git a/python/cm/tests/test_cluster.py b/python/cm/tests/test_cluster.py index bfba307ddd..d28a7289dd 100644 --- a/python/cm/tests/test_cluster.py +++ b/python/cm/tests/test_cluster.py @@ -19,7 +19,7 @@ from rest_framework import status from cm.models import Bundle, Cluster, Prototype -from cm.services.cluster import retrieve_clusters_topology +from cm.services.cluster import retrieve_multiple_clusters_topology from 
cm.tests.utils import gen_component, gen_host, gen_service, generate_hierarchy @@ -207,12 +207,12 @@ def test_retrieve_cluster_topology_success(self) -> None: ) with self.assertNumQueries(num=5): - actual_topology = next(retrieve_clusters_topology(cluster_ids=[cluster.pk])) + actual_topology = next(retrieve_multiple_clusters_topology(cluster_ids=[cluster.pk])) self.assertEqual(actual_topology, expected_topology) second_cluster = generate_hierarchy()["cluster"] with self.assertNumQueries(num=5): - result = tuple(retrieve_clusters_topology(cluster_ids=[cluster.pk, second_cluster.pk])) + result = tuple(retrieve_multiple_clusters_topology(cluster_ids=[cluster.pk, second_cluster.pk])) self.assertSetEqual({entry.cluster_id for entry in result}, {cluster.pk, second_cluster.pk}) diff --git a/python/cm/tests/test_hc.py b/python/cm/tests/test_hc.py index 51dfca614e..a1ec3ae50e 100644 --- a/python/cm/tests/test_hc.py +++ b/python/cm/tests/test_hc.py @@ -18,7 +18,7 @@ from rest_framework.response import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED -from cm.api import add_host_to_cluster, save_hc +from cm.api import add_host_to_cluster from cm.errors import AdcmEx from cm.models import Action, Bundle, ClusterObject, Host, Prototype, ServiceComponent from cm.services.job.checks import check_hostcomponentmap @@ -111,7 +111,13 @@ def test_action_hc(self): self.assertNotEqual(hc_list, None) - save_hc(cluster, hc_list) + self.set_hostcomponent( + cluster=cluster, + entries=[ + (Host.objects.get(id=entry["host_id"]), ServiceComponent.objects.get(id=entry["component_id"])) + for entry in hostcomponent + ], + ) act_hc = [{"service": "hadoop", "component": "server", "action": "remove"}] action = Action(name="run", hostcomponentmap=act_hc) hostcomponent = [ diff --git a/python/cm/tests/test_inventory/test_inventory.py b/python/cm/tests/test_inventory/test_inventory.py index e3a8f82090..6a376b6813 100644 --- a/python/cm/tests/test_inventory/test_inventory.py 
+++ b/python/cm/tests/test_inventory/test_inventory.py @@ -13,11 +13,11 @@ from pathlib import Path -from adcm.tests.base import BaseTestCase +from adcm.tests.base import BaseTestCase, BusinessLogicMixin from core.types import CoreObjectDescriptor from init_db import init as init_adcm -from cm.api import add_hc, add_service_to_cluster, update_obj_config +from cm.api import add_service_to_cluster, update_obj_config from cm.converters import model_name_to_core_type from cm.models import ( Action, @@ -145,7 +145,7 @@ def test_prepare_job_inventory(self): self.assertDictEqual(actual_data, inv) -class TestInventoryAndMaintenanceMode(BaseTestCase): +class TestInventoryAndMaintenanceMode(BusinessLogicMixin, BaseTestCase): def setUp(self): super().setUp() init_adcm() @@ -207,9 +207,12 @@ def setUp(self): "component_id": self.component_hc_acl_2.pk, } - add_hc( + self.set_hostcomponent( cluster=self.cluster_hc_acl, - hc_in=[self.hc_c1_h1, self.hc_c1_h2, self.hc_c1_h3, self.hc_c2_h1, self.hc_c2_h2], + entries=( + (Host.objects.get(id=entry["host_id"]), ServiceComponent.objects.get(id=entry["component_id"])) + for entry in (self.hc_c1_h1, self.hc_c1_h2, self.hc_c1_h3, self.hc_c2_h1, self.hc_c2_h2) + ), ) self.action_hc_acl = Action.objects.get(name="cluster_action_hc_acl", allow_in_maintenance_mode=True) @@ -240,19 +243,11 @@ def setUp(self): cluster=self.cluster_target_group, prototype__name="component_1_target_group" ) - add_hc( + self.set_hostcomponent( cluster=self.cluster_target_group, - hc_in=[ - { - "host_id": self.host_target_group_1.pk, - "service_id": self.service_target_group.pk, - "component_id": self.component_target_group.pk, - }, - { - "host_id": self.host_target_group_2.pk, - "service_id": self.service_target_group.pk, - "component_id": self.component_target_group.pk, - }, + entries=[ + (self.host_target_group_1, self.component_target_group), + (self.host_target_group_2, self.component_target_group), ], ) diff --git a/python/cm/tests/test_requires.py 
b/python/cm/tests/test_requires.py index 3ce369d16c..ca825cff17 100644 --- a/python/cm/tests/test_requires.py +++ b/python/cm/tests/test_requires.py @@ -11,8 +11,9 @@ # limitations under the License. from adcm.tests.base import BaseTestCase +from core.cluster.types import HostComponentEntry -from cm.api import add_hc, add_service_to_cluster +from cm.api import add_service_to_cluster from cm.errors import AdcmEx from cm.issue import update_hierarchy_issues from cm.models import ( @@ -24,6 +25,7 @@ Prototype, ServiceComponent, ) +from cm.services.mapping import change_host_component_mapping class TestComponent(BaseTestCase): @@ -76,15 +78,20 @@ def test_requires_hc(self): with self.assertRaisesRegex( AdcmEx, 'No required service "service_2" for component "component_1_1" of service "service_1"' ): - add_hc(self.cluster, [{"host_id": host.id, "service_id": service_1.id, "component_id": component_1.id}]) + change_host_component_mapping( + cluster_id=self.cluster.id, + bundle_id=self.cluster.bundle_id, + flat_mapping=(HostComponentEntry(host_id=host.id, component_id=component_1.id),), + ) def test_service_requires_issue(self): service_2 = ClusterObject.objects.create(prototype=self.service_proto_2, cluster=self.cluster) update_hierarchy_issues(obj=self.cluster) concerns = service_2.concerns.all() - self.assertEqual(len(concerns), 1) - self.assertEqual(concerns.first().cause, ConcernCause.REQUIREMENT) + # todo is it ok that now concern is both on cluster (mapping) and service (requires)? + self.assertEqual(len(concerns), 2) + requirement_concern = concerns.get(cause=ConcernCause.REQUIREMENT) self.assertIn( "${source} has an issue with requirement. 
Need to be installed: ${target}", - concerns.first().reason.values(), + requirement_concern.reason.values(), ) diff --git a/python/cm/tests/test_upgrade.py b/python/cm/tests/test_upgrade.py index 165e7669ca..4afe085d9e 100644 --- a/python/cm/tests/test_upgrade.py +++ b/python/cm/tests/test_upgrade.py @@ -10,12 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from adcm.tests.base import BaseTestCase +from adcm.tests.base import BaseTestCase, BusinessLogicMixin from cm.adcm_config.config import save_object_config, switch_config from cm.api import ( add_cluster, - add_hc, add_host, add_host_provider, add_host_to_cluster, @@ -406,7 +405,7 @@ def test_non_active_group(self): self.assertEqual(new_attr, {"advance": {"active": False}}) -class TestUpgrade(BaseTestCase): +class TestUpgrade(BusinessLogicMixin, BaseTestCase): def test_upgrade_with_license(self): bundle_1 = cook_cluster_bundle("1.0") bundle_2 = cook_cluster_bundle("2.0") @@ -480,11 +479,7 @@ def test_hc(self): add_host_to_cluster(cluster, host_1) add_host_to_cluster(cluster, host_2) - host_component = [ - {"service_id": service.id, "host_id": host_1.id, "component_id": service_component_1.id}, - {"service_id": service.id, "host_id": host_2.id, "component_id": service_component_2.id}, - ] - add_hc(cluster, host_component) + self.set_hostcomponent(cluster=cluster, entries=[(host_1, service_component_1), (host_2, service_component_2)]) host_component_1 = HostComponent.objects.get(cluster=cluster, service=service, component=service_component_2) self.assertEqual(host_component_1.component.id, service_component_2.id) diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index 615baaeaf7..4d44f1797c 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -16,6 +16,7 @@ import functools from adcm_version import compare_prototype_versions +from core.cluster.types import HostComponentEntry from core.types import ADCMCoreType, ClusterID, 
CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType from django.db import transaction @@ -33,7 +34,6 @@ add_service_to_cluster, check_license, is_version_suitable, - save_hc, ) from cm.converters import orm_object_to_core_type from cm.errors import AdcmEx @@ -59,7 +59,7 @@ ServiceComponent, Upgrade, ) -from cm.services.cluster import retrieve_clusters_topology +from cm.services.cluster import retrieve_cluster_topology, retrieve_multiple_clusters_topology from cm.services.concern import create_issue, retrieve_issue from cm.services.concern.cases import ( recalculate_concerns_on_cluster_upgrade, @@ -67,6 +67,7 @@ from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.job.action import ActionRunPayload, run_action +from cm.services.mapping import change_host_component_mapping from cm.status_api import send_prototype_and_state_update_event from cm.utils import obj_ref @@ -244,7 +245,15 @@ def bundle_revert(obj: Cluster | HostProvider) -> None: ) host_comp_list.append((service, host, component)) - save_hc(cluster=obj, host_comp_list=host_comp_list) + change_host_component_mapping( + cluster_id=obj.id, + bundle_id=old_proto.bundle_id, + flat_mapping=( + HostComponentEntry(host_id=host.id, component_id=component.id) + for (_, host, component) in host_comp_list + ), + skip_checks=True, + ) if isinstance(obj, HostProvider): for host in Host.objects.filter(provider=obj): @@ -543,7 +552,7 @@ def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) def _update_concerns(self) -> None: recalculate_concerns_on_cluster_upgrade(cluster=self._target) - redistribute_issues_and_flags(topology=next(retrieve_clusters_topology((self._target.id,)))) + redistribute_issues_and_flags(topology=retrieve_cluster_topology(self._target.id)) def _get_objects_map_for_policy_update(self) -> dict[Cluster | 
ClusterObject | ServiceComponent, ContentType]: obj_type_map = {self._target: ContentType.objects.get_for_model(Cluster)} @@ -601,7 +610,7 @@ def _update_concerns(self) -> None: clusters_for_redistribution -= {None} if clusters_for_redistribution: - for topology in retrieve_clusters_topology(cluster_ids=clusters_for_redistribution): + for topology in retrieve_multiple_clusters_topology(cluster_ids=clusters_for_redistribution): redistribute_issues_and_flags(topology=topology) def _get_objects_map_for_policy_update(self) -> dict[HostProvider | Host, ContentType]: diff --git a/python/core/cluster/operations.py b/python/core/cluster/operations.py index d9406950a0..93085296ef 100644 --- a/python/core/cluster/operations.py +++ b/python/core/cluster/operations.py @@ -11,6 +11,7 @@ # limitations under the License. from collections import defaultdict +from copy import copy from typing import Any, Collection, Generator, Iterable, Protocol from core.cluster.rules import ( @@ -101,6 +102,36 @@ def build_clusters_topology( ) +def create_topology_with_new_mapping( + topology: ClusterTopology, new_mapping: Iterable[HostComponentEntry] +) -> ClusterTopology: + """ + If we assume that all objects from "new_mapping" are present in the topology, + then we can create a new topology based on that input without additional information. 
+ """ + mapping: dict[ComponentID, set[HostID]] = defaultdict(set) + for entry in new_mapping: + mapping[entry.component_id].add(entry.host_id) + + return ClusterTopology( + cluster_id=topology.cluster_id, + hosts=copy(topology.hosts), + services={ + service_id: ServiceTopology( + info=service.info, + components={ + component_id: ComponentTopology( + info=component.info, + hosts={host_id: topology.hosts[host_id] for host_id in mapping.get(component_id, ())}, + ) + for component_id, component in service.components.items() + }, + ) + for service_id, service in topology.services.items() + }, + ) + + def calculate_maintenance_mode_for_cluster_objects( topology: ClusterTopology, own_maintenance_mode: MaintenanceModeOfObjects ) -> MaintenanceModeOfObjects: diff --git a/python/core/cluster/types.py b/python/core/cluster/types.py index 6193baad18..559f66b8ea 100644 --- a/python/core/cluster/types.py +++ b/python/core/cluster/types.py @@ -14,9 +14,9 @@ from dataclasses import dataclass, field from enum import Enum from itertools import chain -from typing import Generator, NamedTuple +from typing import Generator, NamedTuple, TypeAlias -from core.types import ClusterID, ComponentID, HostID, ServiceID, ShortObjectInfo +from core.types import ClusterID, ComponentID, ComponentName, HostID, ServiceID, ServiceName, ShortObjectInfo class HostClusterPair(NamedTuple): @@ -29,6 +29,11 @@ class HostComponentEntry(NamedTuple): component_id: ComponentID +# Topology + +NamedMapping: TypeAlias = dict[ServiceName, dict[ComponentName, set[HostID]]] + + class ComponentTopology(NamedTuple): info: ShortObjectInfo hosts: dict[HostID, ShortObjectInfo] @@ -78,6 +83,10 @@ class MovedHosts: services: NoEmptyValuesDict[ServiceID, set[HostID]] = field(default_factory=NoEmptyValuesDict) components: NoEmptyValuesDict[ComponentID, set[HostID]] = field(default_factory=NoEmptyValuesDict) + @property + def all(self) -> set[HostID]: + return set(chain.from_iterable(chain(self.services.values(), 
self.components.values()))) + @dataclass(slots=True) class TopologyHostDiff: diff --git a/python/core/concern/__init__.py b/python/core/concern/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/core/concern/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/core/concern/checks/__init__.py b/python/core/concern/checks/__init__.py new file mode 100644 index 0000000000..0d55e52f5a --- /dev/null +++ b/python/core/concern/checks/__init__.py @@ -0,0 +1,16 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from ._mapping import find_cluster_mapping_issues, parse_constraint +from ._service_requires import find_unsatisfied_service_requirements + +__all__ = ["find_cluster_mapping_issues", "parse_constraint", "find_unsatisfied_service_requirements"] diff --git a/python/core/concern/checks/_mapping.py b/python/core/concern/checks/_mapping.py new file mode 100644 index 0000000000..50c4755cc6 --- /dev/null +++ b/python/core/concern/checks/_mapping.py @@ -0,0 +1,188 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque +from functools import partial +from typing import Iterable + +from core.cluster.types import NamedMapping +from core.concern.types import ( + ComponentNameKey, + ComponentRestrictionOwner, + Constraint, + HostsAmount, + MappingRestrictions, + MappingRestrictionType, + MappingRestrictionViolation, + ServiceRestrictionOwner, + SupportedConstraintFormat, +) +from core.types import HostID + + +def find_cluster_mapping_issues( + restrictions: MappingRestrictions, named_mapping: NamedMapping, amount_of_hosts_in_cluster: HostsAmount +) -> tuple[MappingRestrictionViolation, ...]: + if not (restrictions.constraints or restrictions.required or restrictions.binds): + return () + + unsatisfied_restrictions = deque() + + component_mapping: dict[ComponentNameKey, set[HostID]] = { + ComponentNameKey(service=service_name, component=component_name): hosts + for service_name, components in named_mapping.items() + for component_name, hosts in components.items() + } + + for component_key, constraint in restrictions.constraints.items(): + hosts = component_mapping.get(component_key) + # check if component exists + if hosts is None: + continue + + if not is_constraint_restriction_satisfied( + constraint=constraint, hosts_with_component=len(hosts), hosts_in_cluster=amount_of_hosts_in_cluster + ): + unsatisfied_restrictions.append( + MappingRestrictionViolation( + restriction=MappingRestrictionType.CONSTRAINT, + component=component_key, + message=f"{str(component_key).capitalize()} has unsatisfied constraint: {constraint.internal}", + ) + ) + + for dependant_object, required_components in restrictions.required.items(): + if ( + isinstance(dependant_object, ComponentRestrictionOwner) and not component_mapping.get(dependant_object) + ) or (isinstance(dependant_object, ServiceRestrictionOwner) and dependant_object.name not in named_mapping): + # object with restriction isn't added to cluster + continue + + unsatisfied_requires = 
find_first_unmapped_component( + required_components=required_components, named_mapping=named_mapping + ) + if unsatisfied_requires: + unsatisfied_restrictions.append( + MappingRestrictionViolation( + restriction=MappingRestrictionType.REQUIRES, + component=dependant_object, + message=f"No required {unsatisfied_requires} for {dependant_object}", + ) + ) + + for component_key, bind_component in restrictions.binds.items(): + hosts = component_mapping.get(component_key) + # check if component exists + if hosts is None: + continue + + if not is_bound_to_restriction_satisfied( + bound_component=bind_component, component_hosts=hosts, named_mapping=named_mapping + ): + message = ( + "Component `bound_to` restriction violated.\n" + f"Each host with {bind_component} should have mapped {component_key}." + ) + unsatisfied_restrictions.append( + MappingRestrictionViolation( + restriction=MappingRestrictionType.BOUND_TO, + component=component_key, + message=message, + ) + ) + + return tuple(unsatisfied_restrictions) + + +def is_constraint_restriction_satisfied( + constraint: Constraint, hosts_with_component: HostsAmount, hosts_in_cluster: HostsAmount +) -> bool: + return all( + constraint_is_satisfied(hosts_with_component, hosts_in_cluster) for constraint_is_satisfied in constraint.checks + ) + + +def find_first_unmapped_component( + required_components: Iterable[ComponentNameKey], named_mapping: NamedMapping +) -> ComponentNameKey | None: + for component_key in required_components: + if not named_mapping.get(component_key.service, {}).get(component_key.component, ()): + return component_key + + return None + + +def is_bound_to_restriction_satisfied( + bound_component: ComponentNameKey, component_hosts: set[HostID], named_mapping: NamedMapping +) -> bool: + # unmapped dependant component satisfies restriction + if not component_hosts: + return True + + bound_component_hosts = named_mapping.get(bound_component.service, {}).get(bound_component.component, set()) + return 
bound_component_hosts == component_hosts + + +# Constraint Preparation + + +def check_equal_or_less(mapped_hosts: HostsAmount, _: HostsAmount, argument: int): + return mapped_hosts <= argument + + +def check_equal_or_greater(mapped_hosts: HostsAmount, _: HostsAmount, argument: int): + return mapped_hosts >= argument + + +def check_exact(mapped_hosts: HostsAmount, _: HostsAmount, argument: int): + return mapped_hosts == argument + + +def check_is_odd(mapped_hosts: HostsAmount, _: HostsAmount): + return mapped_hosts % 2 == 1 + + +def check_is_zero_or_odd(mapped_hosts: HostsAmount, hosts_in_cluster: HostsAmount): + if mapped_hosts == 0: + return True + + return check_is_odd(mapped_hosts, hosts_in_cluster) + + +def check_on_all(mapped_hosts: HostsAmount, hosts_in_cluster: HostsAmount): + return mapped_hosts > 0 and mapped_hosts == hosts_in_cluster + + +def parse_constraint(constraint: SupportedConstraintFormat) -> Constraint: + match constraint: + case [0, "+"]: + # no checks actually required, it's the "default" + checks = () + case ["+"]: + checks = (check_on_all,) + case ["odd"]: + checks = (check_is_odd,) + case [int(exact)]: + checks = (partial(check_exact, argument=exact),) + case [0, "odd"]: + checks = (check_is_zero_or_odd,) + case [int(min_), "odd"]: + checks = (partial(check_equal_or_greater, argument=min_), check_is_odd) + case [int(min_), "+"]: + checks = (partial(check_equal_or_greater, argument=min_),) + case [int(min_), int(max_)]: + checks = (partial(check_equal_or_greater, argument=min_), partial(check_equal_or_less, argument=max_)) + case _: + # keep this function safe, even though it may lead to "strange" results + checks = () + + return Constraint(internal=constraint, checks=checks) diff --git a/python/core/concern/checks/_service_requires.py b/python/core/concern/checks/_service_requires.py new file mode 100644 index 0000000000..965728839e --- /dev/null +++ b/python/core/concern/checks/_service_requires.py @@ -0,0 +1,48 @@ +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque + +from core.cluster.types import NamedMapping +from core.concern.types import ( + ComponentRestrictionOwner, + MissingServiceRequiresViolation, + ServiceDependencies, + ServiceRestrictionOwner, +) + + +def find_unsatisfied_service_requirements( + services_restrictions: ServiceDependencies, named_mapping: NamedMapping +) -> tuple[MissingServiceRequiresViolation, ...]: + if not services_restrictions: + return () + + violations = deque() + + existing_services = set(named_mapping) + + for dependant_object, requires in services_restrictions.items(): + # if dependant object isn't added, requires shouldn't be checked + if ( + isinstance(dependant_object, ComponentRestrictionOwner) + and dependant_object.component not in named_mapping.get(dependant_object.service, ()) + ) or (isinstance(dependant_object, ServiceRestrictionOwner) and dependant_object.name not in existing_services): + continue + + if not_found_services := requires - existing_services: + violations.extend( + MissingServiceRequiresViolation(dependant_object=dependant_object, required_service=service) + for service in not_found_services + ) + + return tuple(violations) diff --git a/python/core/concern/types.py b/python/core/concern/types.py new file mode 100644 index 0000000000..36ff737013 --- /dev/null +++ b/python/core/concern/types.py @@ -0,0 +1,88 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from dataclasses import dataclass +from enum import Enum +from typing import Callable, NamedTuple, TypeAlias + +from core.types import ComponentName, ServiceName + + +class ComponentNameKey(NamedTuple): + service: ServiceName + component: ComponentName + + def __str__(self) -> str: + return f'component "{self.component}" of service "{self.service}"' + + +ComponentRestrictionOwner: TypeAlias = ComponentNameKey + + +class ServiceRestrictionOwner(NamedTuple): + name: ServiceName + + def __str__(self) -> str: + return f'service "{self.name}"' + + +class MappingRestrictionType(Enum): + CONSTRAINT = "constraint" + REQUIRES = "requires" + BOUND_TO = "bound_to" + + +class MappingRestrictionViolation(NamedTuple): + restriction: MappingRestrictionType + component: ComponentNameKey + message: str + + +class MissingServiceRequiresViolation(NamedTuple): + required_service: ServiceName + dependant_object: ServiceRestrictionOwner | ComponentRestrictionOwner + + +HostsAmount: TypeAlias = int + + +# Constraints - Requirements On Mapped Hosts Amount + + +SupportedConstraintFormat: TypeAlias = tuple[str] | tuple[HostsAmount | str, HostsAmount | str] + + +class Constraint(NamedTuple): + internal: SupportedConstraintFormat + checks: tuple[Callable[[HostsAmount, HostsAmount], bool], ...] 
+ + +# Bundle Restrictions + +# Services that should be added to cluster +ServiceDependencies: TypeAlias = dict[ServiceRestrictionOwner | ComponentRestrictionOwner, set[ServiceName]] + + +@dataclass(slots=True) +class MappingRestrictions: + constraints: dict[ComponentRestrictionOwner, Constraint] + # Components that should be mapped at least on one host + required: dict[ServiceRestrictionOwner | ComponentRestrictionOwner, deque[ComponentNameKey]] + # Should be mapped on the same hosts + binds: dict[ComponentRestrictionOwner, ComponentNameKey] + + +@dataclass(slots=True, frozen=True) +class BundleRestrictions: + service_requires: ServiceDependencies + mapping: MappingRestrictions diff --git a/python/core/converters.py b/python/core/converters.py new file mode 100644 index 0000000000..6aad847c80 --- /dev/null +++ b/python/core/converters.py @@ -0,0 +1,20 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from core.cluster.types import ClusterTopology, NamedMapping + + +def named_mapping_from_topology(topology: ClusterTopology) -> NamedMapping: + return { + service.info.name: {component.info.name: set(component.hosts) for component in service.components.values()} + for service in topology.services.values() + } diff --git a/python/rbac/tests/test_policy/test_rbac.py b/python/rbac/tests/test_policy/test_rbac.py index 0bf349f335..157c707f8e 100644 --- a/python/rbac/tests/test_policy/test_rbac.py +++ b/python/rbac/tests/test_policy/test_rbac.py @@ -10,7 +10,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from cm.api import add_hc, add_host_to_cluster, add_service_to_cluster +from adcm.tests.base import BusinessLogicMixin +from cm.api import add_host_to_cluster, add_service_to_cluster from cm.models import ( Cluster, ClusterObject, @@ -24,7 +25,7 @@ from rbac.tests.test_base import RBACBaseTestCase -class PolicyRBACTestCase(RBACBaseTestCase): +class PolicyRBACTestCase(BusinessLogicMixin, RBACBaseTestCase): """Tests for applying policy with different combination of roles and object""" def setUp(self) -> None: @@ -231,21 +232,7 @@ def test_parent_policy4host_in_service(self): _, host1, host2 = self.get_hosts_and_provider() add_host_to_cluster(self.cluster, host1) add_host_to_cluster(self.cluster, host2) - add_hc( - self.cluster, - [ - { - "service_id": self.service_1.id, - "component_id": self.component_11.id, - "host_id": host1.id, - }, - { - "service_id": self.service_2.id, - "component_id": self.component_21.id, - "host_id": host2.id, - }, - ], - ) + self.set_hostcomponent(cluster=self.cluster, entries=[(host1, self.component_11), (host2, self.component_21)]) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component_host()) policy.group.add(self.group) policy.add_object(self.service_1) @@ -274,25 +261,9 @@ def test_parent_policy4host_in_component(self): 
add_host_to_cluster(self.cluster, host1) add_host_to_cluster(self.cluster, host2) add_host_to_cluster(self.cluster, host3) - add_hc( - self.cluster, - [ - { - "service_id": self.service_2.id, - "component_id": self.component_21.id, - "host_id": host1.id, - }, - { - "service_id": self.service_2.id, - "component_id": self.component_21.id, - "host_id": host2.id, - }, - { - "service_id": self.service_1.id, - "component_id": self.component_11.id, - "host_id": host3.id, - }, - ], + self.set_hostcomponent( + cluster=self.cluster, + entries=[(host1, self.component_21), (host2, self.component_21), (host3, self.component_11)], ) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component_host()) @@ -374,16 +345,7 @@ def test_add_service(self): def test_add_host(self): _, host1, host2 = self.get_hosts_and_provider() add_host_to_cluster(self.cluster, host1) - add_hc( - self.cluster, - [ - { - "service_id": self.service_1.id, - "component_id": self.component_11.id, - "host_id": host1.id, - }, - ], - ) + self.set_hostcomponent(cluster=self.cluster, entries=[(host1, self.component_11)]) policy = Policy.objects.create(role=self.object_role_custom_perm_cluster_service_component_host()) policy.group.add(self.group) @@ -414,16 +376,7 @@ def test_add_host(self): def test_add_hc(self): _, host1, host2 = self.get_hosts_and_provider() add_host_to_cluster(self.cluster, host1) - add_hc( - self.cluster, - [ - { - "service_id": self.service_1.id, - "component_id": self.component_11.id, - "host_id": host1.id, - }, - ], - ) + self.set_hostcomponent(cluster=self.cluster, entries=[(host1, self.component_11)]) policy = Policy.objects.create(role=self.object_role_custom_perm_service_component_host()) policy.group.add(self.group) policy.add_object(self.service_1) @@ -445,21 +398,7 @@ def test_add_hc(self): self.assertFalse(self.user.has_perm("cm.change_config_of_host", host2)) add_host_to_cluster(self.cluster, host2) - add_hc( - self.cluster, - [ - { - "service_id": 
self.service_1.id, - "component_id": self.component_11.id, - "host_id": host1.id, - }, - { - "service_id": self.service_1.id, - "component_id": self.component_12.id, - "host_id": host2.id, - }, - ], - ) + self.set_hostcomponent(cluster=self.cluster, entries=[(host1, self.component_11), (host2, self.component_12)]) self.assertFalse(self.user.has_perm("cm.change_config_of_cluster", self.cluster)) self.assertTrue(self.user.has_perm("cm.change_config_of_clusterobject", self.service_1)) From fc4e904d560f61f679692096bf7731824951cf62 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Thu, 19 Sep 2024 12:31:23 +0000 Subject: [PATCH 69/98] ADCM-5965 Change `requires` of component on service calculation --- python/cm/services/concern/_operaitons.py | 10 +----- python/cm/services/concern/repo.py | 17 ++++++---- python/cm/tests/test_requires.py | 9 +++-- python/core/concern/checks/_mapping.py | 40 ++++++++++++++++++----- python/core/concern/types.py | 12 ++++--- 5 files changed, 54 insertions(+), 34 deletions(-) diff --git a/python/cm/services/concern/_operaitons.py b/python/cm/services/concern/_operaitons.py index bfddb0cb1b..b66270bc8d 100644 --- a/python/cm/services/concern/_operaitons.py +++ b/python/cm/services/concern/_operaitons.py @@ -10,7 +10,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from itertools import chain from typing import Iterable from core.types import ADCMCoreType, CoreObjectDescriptor, ObjectID @@ -109,14 +108,7 @@ def _get_target_and_placeholder_types( cluster_id = ClusterObject.objects.values_list("cluster_id", flat=True).get(pk=owner.id) placeholder_type_dto = PlaceholderTypeDTO(source="cluster_services", target="prototype") - required_services_names = {require["service"] for require in owner_prototype["requires"]} | set( - chain.from_iterable( - (require["service"] for require in requires if "service" in require and "component" not in require) - for requires in Prototype.objects.filter(parent_id=owner_prototype["id"]).values_list( - "requires", flat=True - ) - ) - ) + required_services_names = {require["service"] for require in owner_prototype["requires"]} existing_required_services = set( ClusterObject.objects.values_list("prototype__name", flat=True).filter( cluster_id=cluster_id, prototype__name__in=required_services_names diff --git a/python/cm/services/concern/repo.py b/python/cm/services/concern/repo.py index d3e3b868ab..fae0a3576c 100644 --- a/python/cm/services/concern/repo.py +++ b/python/cm/services/concern/repo.py @@ -27,7 +27,9 @@ def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: - mapping_restrictions = MappingRestrictions(constraints={}, required=defaultdict(deque), binds={}) + mapping_restrictions = MappingRestrictions( + constraints={}, required_components=defaultdict(deque), required_services=defaultdict(set), binds={} + ) service_requires: ServiceDependencies = defaultdict(set) for component_name, service_name, constraint, requires, bound_to in ( @@ -44,14 +46,16 @@ def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: # so it's enough to "add" required service. # - ones with `component` key adds restriction on mapping, # because such component should be mapped on at least one host. 
+ # + # "service" requires from component are relative only to mapping checks, + # it doesn't affect service-related concerns. required_service_name = requirement["service"] if required_component_name := requirement.get("component"): - # "service" requirements aren't checked for mapping issue - mapping_restrictions.required[key].append( + mapping_restrictions.required_components[key].append( ComponentNameKey(component=required_component_name, service=required_service_name) ) else: - service_requires[key].add(required_service_name) + mapping_restrictions.required_services[key].add(required_service_name) constraint = parse_constraint(constraint) if constraint.checks: @@ -72,11 +76,10 @@ def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: for requirement in requires: required_service_name = requirement["service"] + service_requires[key].add(required_service_name) if component_name := requirement.get("component"): - mapping_restrictions.required[key].append( + mapping_restrictions.required_components[key].append( ComponentNameKey(component=component_name, service=required_service_name) ) - else: - service_requires[key].add(required_service_name) return BundleRestrictions(service_requires=service_requires, mapping=mapping_restrictions) diff --git a/python/cm/tests/test_requires.py b/python/cm/tests/test_requires.py index ca825cff17..ff64b69841 100644 --- a/python/cm/tests/test_requires.py +++ b/python/cm/tests/test_requires.py @@ -76,7 +76,7 @@ def test_requires_hc(self): ) with self.assertRaisesRegex( - AdcmEx, 'No required service "service_2" for component "component_1_1" of service "service_1"' + AdcmEx, 'Services required for component "component_1_1" of service "service_1" are missing: service_2' ): change_host_component_mapping( cluster_id=self.cluster.id, @@ -88,10 +88,9 @@ def test_service_requires_issue(self): service_2 = ClusterObject.objects.create(prototype=self.service_proto_2, cluster=self.cluster) 
update_hierarchy_issues(obj=self.cluster) concerns = service_2.concerns.all() - # todo is it ok that now concern is both on cluster (mapping) and service (requires)? - self.assertEqual(len(concerns), 2) - requirement_concern = concerns.get(cause=ConcernCause.REQUIREMENT) + self.assertEqual(len(concerns), 1) + self.assertEqual(concerns.first().cause, ConcernCause.REQUIREMENT) self.assertIn( "${source} has an issue with requirement. Need to be installed: ${target}", - requirement_concern.reason.values(), + concerns.first().reason.values(), ) diff --git a/python/core/concern/checks/_mapping.py b/python/core/concern/checks/_mapping.py index 50c4755cc6..704698297d 100644 --- a/python/core/concern/checks/_mapping.py +++ b/python/core/concern/checks/_mapping.py @@ -32,7 +32,7 @@ def find_cluster_mapping_issues( restrictions: MappingRestrictions, named_mapping: NamedMapping, amount_of_hosts_in_cluster: HostsAmount ) -> tuple[MappingRestrictionViolation, ...]: - if not (restrictions.constraints or restrictions.required or restrictions.binds): + if not (restrictions.constraints or restrictions.required_components or restrictions.binds): return () unsatisfied_restrictions = deque() @@ -55,18 +55,25 @@ def find_cluster_mapping_issues( unsatisfied_restrictions.append( MappingRestrictionViolation( restriction=MappingRestrictionType.CONSTRAINT, - component=component_key, + owner=component_key, message=f"{str(component_key).capitalize()} has unsatisfied constraint: {constraint.internal}", ) ) - for dependant_object, required_components in restrictions.required.items(): - if ( - isinstance(dependant_object, ComponentRestrictionOwner) and not component_mapping.get(dependant_object) - ) or (isinstance(dependant_object, ServiceRestrictionOwner) and dependant_object.name not in named_mapping): - # object with restriction isn't added to cluster + for dependant_object, required_components in restrictions.required_components.items(): + if isinstance(dependant_object, 
ComponentRestrictionOwner) and not component_mapping.get(dependant_object): + # restriction from unmapped dependant component shouldn't be checked continue + if isinstance(dependant_object, ServiceRestrictionOwner): + # In order for "requires" restriction from service to be performed, following condition should be met: + # 1. Service added to cluster + # 2. At least one component of this service should be mapped + + at_least_one_mapped = any(named_mapping.get(dependant_object.name, {}).values()) + if not at_least_one_mapped: + continue + unsatisfied_requires = find_first_unmapped_component( required_components=required_components, named_mapping=named_mapping ) @@ -74,11 +81,26 @@ def find_cluster_mapping_issues( unsatisfied_restrictions.append( MappingRestrictionViolation( restriction=MappingRestrictionType.REQUIRES, - component=dependant_object, + owner=dependant_object, message=f"No required {unsatisfied_requires} for {dependant_object}", ) ) + for component_key, required_services in restrictions.required_services.items(): + if not component_mapping.get(component_key): + continue + + not_existing_services = required_services.difference(named_mapping) + if not_existing_services: + unsatisfied_restrictions.append( + MappingRestrictionViolation( + restriction=MappingRestrictionType.REQUIRES, + owner=component_key, + message=f"Services required for {component_key} are " + f"missing: {', '.join(sorted(not_existing_services))}", + ) + ) + for component_key, bind_component in restrictions.binds.items(): hosts = component_mapping.get(component_key) # check if component exists @@ -95,7 +117,7 @@ def find_cluster_mapping_issues( unsatisfied_restrictions.append( MappingRestrictionViolation( restriction=MappingRestrictionType.BOUND_TO, - component=component_key, + owner=component_key, message=message, ) ) diff --git a/python/core/concern/types.py b/python/core/concern/types.py index 36ff737013..f6b9d004e8 100644 --- a/python/core/concern/types.py +++ 
b/python/core/concern/types.py @@ -42,13 +42,15 @@ class MappingRestrictionType(Enum): BOUND_TO = "bound_to" -class MappingRestrictionViolation(NamedTuple): +@dataclass(slots=True) +class MappingRestrictionViolation: restriction: MappingRestrictionType - component: ComponentNameKey + owner: ServiceRestrictionOwner | ComponentRestrictionOwner message: str -class MissingServiceRequiresViolation(NamedTuple): +@dataclass(slots=True) +class MissingServiceRequiresViolation: required_service: ServiceName dependant_object: ServiceRestrictionOwner | ComponentRestrictionOwner @@ -77,7 +79,9 @@ class Constraint(NamedTuple): class MappingRestrictions: constraints: dict[ComponentRestrictionOwner, Constraint] # Components that should be mapped at least on one host - required: dict[ServiceRestrictionOwner | ComponentRestrictionOwner, deque[ComponentNameKey]] + required_components: dict[ServiceRestrictionOwner | ComponentRestrictionOwner, deque[ComponentNameKey]] + # Services that should exist when component is mapped + required_services: dict[ComponentRestrictionOwner, set[ServiceName]] # Should be mapped on the same hosts binds: dict[ComponentRestrictionOwner, ComponentNameKey] From 9e05e8d3cda8a2aee915f3da859e5f5c70d93889 Mon Sep 17 00:00:00 2001 From: Egor Araslanov Date: Mon, 23 Sep 2024 10:31:22 +0500 Subject: [PATCH 70/98] ADCM-5965 Check correct topology during upgrade without hostcomponent --- python/cm/services/job/action.py | 16 ++++------------ 1 file changed, 4 insertions(+), 12 deletions(-) diff --git a/python/cm/services/job/action.py b/python/cm/services/job/action.py index 8df947f60d..a0f05a15f1 100644 --- a/python/cm/services/job/action.py +++ b/python/cm/services/job/action.py @@ -14,7 +14,6 @@ from functools import partial from typing import TypeAlias -from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference from core.cluster.types import HostComponentEntry from core.job.dto import TaskPayloadDTO from core.types import 
ActionTargetDescriptor, CoreObjectDescriptor @@ -52,7 +51,7 @@ from cm.services.job.inventory._config import update_configuration_for_inventory_inplace from cm.services.job.prepare import prepare_task_for_action from cm.services.job.run import run_task -from cm.services.mapping import change_host_component_mapping, check_no_host_in_mm +from cm.services.mapping import change_host_component_mapping from cm.status_api import send_task_status_update_event from cm.variant import process_variant @@ -244,20 +243,13 @@ def _process_hostcomponent( if is_upgrade_action and not action.hostcomponentmap: topology = retrieve_cluster_topology(cluster_id=cluster.id) bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(action.upgrade.bundle_id)) - new_topology = create_topology_with_new_mapping( - topology=topology, - new_mapping=( - HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) - for entry in new_hostcomponent - ), - ) + check_mapping_restrictions( mapping_restrictions=bundle_restrictions.mapping, - topology=new_topology, + topology=topology, error_message_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, ) - host_difference = find_hosts_difference(new_topology=new_topology, old_topology=topology) - check_no_host_in_mm(host_difference.mapped.all) + return None, [], {}, is_upgrade_action host_map, post_upgrade_hc, delta = check_hostcomponentmap(cluster=cluster, action=action, new_hc=new_hostcomponent) From d3306214142810574c9789184ce2276455b7b6c0 Mon Sep 17 00:00:00 2001 From: Kirill Fedorenko Date: Mon, 23 Sep 2024 08:36:08 +0000 Subject: [PATCH 71/98] ADCM-5936 [UI] Remove tooltips when there is no focus on the trigger element https://tracker.yandex.ru/ADCM-5936 ![image](/uploads/fb2e5adc87a7a761db0680331260ed0c/image.png) --- adcm-web/app/src/components/uikit/Modal/Modal.tsx | 2 +- adcm-web/app/src/components/uikit/Popover/Popover.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/adcm-web/app/src/components/uikit/Modal/Modal.tsx b/adcm-web/app/src/components/uikit/Modal/Modal.tsx index 46a70c6049..1853715c9f 100644 --- a/adcm-web/app/src/components/uikit/Modal/Modal.tsx +++ b/adcm-web/app/src/components/uikit/Modal/Modal.tsx @@ -42,7 +42,7 @@ const Modal: React.FC = ({ {isOpen && ( - +
= ({ return ( {isOpen && ( - + {React.cloneElement(popoverPanel, { ref, ...children.props, style: panelStyle, ...getFloatingProps() })} )} From 18856619e32f96d2584a1d7ae7101793be07b3ff Mon Sep 17 00:00:00 2001 From: Igor Kuzmin Date: Mon, 23 Sep 2024 10:27:53 +0000 Subject: [PATCH 72/98] bugfix/ADCM-5960 use object id's as dependencies instead of objects Task: https://tracker.yandex.ru/ADCM-5960 --- .../useHostProviderConfigGroupConfiguration.ts | 8 ++++---- .../useHostProviderPrimaryConfiguration.ts | 8 ++++---- .../useClusterConfigGroupConfiguration.ts | 8 ++++---- .../useClusterPrimaryConfiguration.ts | 8 ++++---- .../useServiceConfigGroupSingleConfiguration.ts | 8 ++++---- .../useServicesPrimaryConfiguration.ts | 8 ++++---- .../useServiceComponentConfigGroupConfiguration.ts | 8 ++++---- adcm-web/app/src/models/adcm/backendEvents.ts | 6 +++--- 8 files changed, 31 insertions(+), 31 deletions(-) diff --git a/adcm-web/app/src/components/pages/HostProviderPage/HostProviderConfigurationGroupSingle/HostProviderConfigGroupConfiguration/useHostProviderConfigGroupConfiguration.ts b/adcm-web/app/src/components/pages/HostProviderPage/HostProviderConfigurationGroupSingle/HostProviderConfigGroupConfiguration/useHostProviderConfigGroupConfiguration.ts index 05fd20a41d..3c442878d4 100644 --- a/adcm-web/app/src/components/pages/HostProviderPage/HostProviderConfigurationGroupSingle/HostProviderConfigGroupConfiguration/useHostProviderConfigGroupConfiguration.ts +++ b/adcm-web/app/src/components/pages/HostProviderPage/HostProviderConfigurationGroupSingle/HostProviderConfigGroupConfiguration/useHostProviderConfigGroupConfiguration.ts @@ -18,7 +18,7 @@ export const useHostProviderConfigGroupConfiguration = () => { const isVersionsLoading = useStore(({ adcm }) => adcm.entityConfiguration.isVersionsLoading); useEffect(() => { - if (hostProvider && hostProviderConfigGroup) { + if (hostProvider?.id && hostProviderConfigGroup?.id) { // load all configurations for current HostProvider 
dispatch( getConfigurationsVersions({ @@ -34,7 +34,7 @@ export const useHostProviderConfigGroupConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [hostProvider, hostProviderConfigGroup, dispatch]); + }, [hostProvider?.id, hostProviderConfigGroup?.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -42,7 +42,7 @@ export const useHostProviderConfigGroupConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; useEffect(() => { - if (hostProvider && hostProviderConfigGroup && selectedConfigId) { + if (hostProvider?.id && hostProviderConfigGroup?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ @@ -55,7 +55,7 @@ export const useHostProviderConfigGroupConfiguration = () => { }), ); } - }, [dispatch, hostProvider, hostProviderConfigGroup, selectedConfigId]); + }, [dispatch, hostProvider?.id, hostProviderConfigGroup?.id, selectedConfigId]); const selectedConfiguration = selectedConfigId === 0 ? 
configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/components/pages/HostProviderPage/HostProviderPrimaryConfiguration/useHostProviderPrimaryConfiguration.ts b/adcm-web/app/src/components/pages/HostProviderPage/HostProviderPrimaryConfiguration/useHostProviderPrimaryConfiguration.ts index 699759c07f..45e5da2889 100644 --- a/adcm-web/app/src/components/pages/HostProviderPage/HostProviderPrimaryConfiguration/useHostProviderPrimaryConfiguration.ts +++ b/adcm-web/app/src/components/pages/HostProviderPage/HostProviderPrimaryConfiguration/useHostProviderPrimaryConfiguration.ts @@ -18,7 +18,7 @@ export const useHostProviderPrimaryConfiguration = () => { const accessCheckStatus = useStore(({ adcm }) => adcm.entityConfiguration.accessCheckStatus); useEffect(() => { - if (hostProvider) { + if (hostProvider?.id) { // load all configurations for current HostProvider dispatch(getConfigurationsVersions({ entityType: 'host-provider', args: { hostProviderId: hostProvider.id } })); } @@ -26,7 +26,7 @@ export const useHostProviderPrimaryConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [hostProvider, dispatch]); + }, [hostProvider?.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -34,7 +34,7 @@ export const useHostProviderPrimaryConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; useEffect(() => { - if (hostProvider && selectedConfigId) { + if (hostProvider?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ @@ -43,7 +43,7 @@ export const useHostProviderPrimaryConfiguration = () => { }), ); } - }, [dispatch, hostProvider, selectedConfigId]); + }, [dispatch, hostProvider?.id, selectedConfigId]); const selectedConfiguration = selectedConfigId === 0 ? 
configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterConfigGroupSingle/ClusterConfigGroupConfiguration/useClusterConfigGroupConfiguration.ts b/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterConfigGroupSingle/ClusterConfigGroupConfiguration/useClusterConfigGroupConfiguration.ts index c37dce5569..4c4e976796 100644 --- a/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterConfigGroupSingle/ClusterConfigGroupConfiguration/useClusterConfigGroupConfiguration.ts +++ b/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterConfigGroupSingle/ClusterConfigGroupConfiguration/useClusterConfigGroupConfiguration.ts @@ -18,7 +18,7 @@ export const useClusterConfigGroupConfiguration = () => { const isVersionsLoading = useStore(({ adcm }) => adcm.entityConfiguration.isVersionsLoading); useEffect(() => { - if (cluster && clusterConfigGroup) { + if (cluster?.id && clusterConfigGroup?.id) { // load all configurations for current Cluster dispatch( getConfigurationsVersions({ @@ -31,7 +31,7 @@ export const useClusterConfigGroupConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [cluster, clusterConfigGroup, dispatch]); + }, [cluster?.id, clusterConfigGroup?.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -39,7 +39,7 @@ export const useClusterConfigGroupConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; useEffect(() => { - if (cluster && clusterConfigGroup && selectedConfigId) { + if (cluster?.id && clusterConfigGroup?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ @@ -52,7 +52,7 @@ export const useClusterConfigGroupConfiguration = () => { }), ); } - }, [dispatch, cluster, clusterConfigGroup, selectedConfigId]); + }, [dispatch, cluster?.id, clusterConfigGroup?.id, selectedConfigId]); const 
selectedConfiguration = selectedConfigId === 0 ? configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterPrimaryConfiguration/useClusterPrimaryConfiguration.ts b/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterPrimaryConfiguration/useClusterPrimaryConfiguration.ts index bfafda7491..770f20742e 100644 --- a/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterPrimaryConfiguration/useClusterPrimaryConfiguration.ts +++ b/adcm-web/app/src/components/pages/cluster/ClusterConfiguration/ClusterPrimaryConfiguration/useClusterPrimaryConfiguration.ts @@ -18,7 +18,7 @@ export const useClusterPrimaryConfiguration = () => { const accessCheckStatus = useStore(({ adcm }) => adcm.entityConfiguration.accessCheckStatus); useEffect(() => { - if (cluster) { + if (cluster?.id) { // load all configurations for current Cluster dispatch(getConfigurationsVersions({ entityType: 'cluster', args: { clusterId: cluster.id } })); } @@ -26,7 +26,7 @@ export const useClusterPrimaryConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [cluster, dispatch]); + }, [cluster?.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -34,13 +34,13 @@ export const useClusterPrimaryConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; useEffect(() => { - if (cluster && selectedConfigId) { + if (cluster?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ entityType: 'cluster', args: { clusterId: cluster.id, configId: selectedConfigId } }), ); } - }, [dispatch, cluster, selectedConfigId]); + }, [dispatch, cluster?.id, selectedConfigId]); const selectedConfiguration = selectedConfigId === 0 ? 
configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServiceConfigGroupSingle/ServiceConfigGroupSingleConfiguration/useServiceConfigGroupSingleConfiguration.ts b/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServiceConfigGroupSingle/ServiceConfigGroupSingleConfiguration/useServiceConfigGroupSingleConfiguration.ts index 6da36b0486..ad8cabb1ea 100644 --- a/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServiceConfigGroupSingle/ServiceConfigGroupSingleConfiguration/useServiceConfigGroupSingleConfiguration.ts +++ b/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServiceConfigGroupSingle/ServiceConfigGroupSingleConfiguration/useServiceConfigGroupSingleConfiguration.ts @@ -19,7 +19,7 @@ export const useServiceConfigGroupSingleConfiguration = () => { const isVersionsLoading = useStore(({ adcm }) => adcm.entityConfiguration.isVersionsLoading); useEffect(() => { - if (cluster && service && serviceConfigGroup) { + if (cluster?.id && service?.id && serviceConfigGroup?.id) { // load all configurations for current Cluster dispatch( getConfigurationsVersions({ @@ -36,7 +36,7 @@ export const useServiceConfigGroupSingleConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [cluster, service, serviceConfigGroup, dispatch]); + }, [cluster?.id, service?.id, serviceConfigGroup?.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -44,7 +44,7 @@ export const useServiceConfigGroupSingleConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; useEffect(() => { - if (cluster && service && serviceConfigGroup && selectedConfigId) { + if (cluster?.id && service?.id && serviceConfigGroup?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ @@ -58,7 +58,7 @@ export const 
useServiceConfigGroupSingleConfiguration = () => { }), ); } - }, [dispatch, cluster, service, serviceConfigGroup, selectedConfigId]); + }, [dispatch, cluster?.id, service?.id, serviceConfigGroup?.id, selectedConfigId]); const selectedConfiguration = selectedConfigId === 0 ? configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServicePrimaryConfiguration/useServicesPrimaryConfiguration.ts b/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServicePrimaryConfiguration/useServicesPrimaryConfiguration.ts index 32498a927d..1cd5e28f1b 100644 --- a/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServicePrimaryConfiguration/useServicesPrimaryConfiguration.ts +++ b/adcm-web/app/src/components/pages/cluster/service/ServiceConfiguration/ServicePrimaryConfiguration/useServicesPrimaryConfiguration.ts @@ -18,7 +18,7 @@ export const useServicesPrimaryConfiguration = () => { const accessCheckStatus = useStore(({ adcm }) => adcm.entityConfiguration.accessCheckStatus); useEffect(() => { - if (service) { + if (service?.id) { // load all configurations for current HostProvider dispatch( getConfigurationsVersions({ @@ -31,7 +31,7 @@ export const useServicesPrimaryConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [service, dispatch]); + }, [service?.id, service?.cluster.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -39,7 +39,7 @@ export const useServicesPrimaryConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; useEffect(() => { - if (service && selectedConfigId) { + if (service?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ @@ -52,7 +52,7 @@ export const useServicesPrimaryConfiguration = () => { }), ); } - }, [dispatch, service, selectedConfigId]); + }, [service?.id, service?.cluster.id, dispatch, 
selectedConfigId]); const selectedConfiguration = selectedConfigId === 0 ? configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/components/pages/cluster/service/component/ServiceComponentConfiguration/ServiceComponentConfigGroupSingle/ServiceComponentConfigGroupConfiguration/useServiceComponentConfigGroupConfiguration.ts b/adcm-web/app/src/components/pages/cluster/service/component/ServiceComponentConfiguration/ServiceComponentConfigGroupSingle/ServiceComponentConfigGroupConfiguration/useServiceComponentConfigGroupConfiguration.ts index 0ed9e2e0e9..6a50de716d 100644 --- a/adcm-web/app/src/components/pages/cluster/service/component/ServiceComponentConfiguration/ServiceComponentConfigGroupSingle/ServiceComponentConfigGroupConfiguration/useServiceComponentConfigGroupConfiguration.ts +++ b/adcm-web/app/src/components/pages/cluster/service/component/ServiceComponentConfiguration/ServiceComponentConfigGroupSingle/ServiceComponentConfigGroupConfiguration/useServiceComponentConfigGroupConfiguration.ts @@ -22,7 +22,7 @@ export const useServiceComponentConfigGroupConfiguration = () => { const isVersionsLoading = useStore(({ adcm }) => adcm.entityConfiguration.isVersionsLoading); useEffect(() => { - if (cluster && service && component && serviceComponentConfigGroup) { + if (cluster?.id && service?.id && component?.id && serviceComponentConfigGroup?.id) { // load all configurations for current Cluster dispatch( getConfigurationsVersions({ @@ -40,7 +40,7 @@ export const useServiceComponentConfigGroupConfiguration = () => { return () => { dispatch(cleanup()); }; - }, [cluster, service, component, serviceComponentConfigGroup, dispatch]); + }, [cluster?.id, service?.id, component?.id, serviceComponentConfigGroup?.id, dispatch]); const configurationsOptions = useConfigurations({ configVersions, @@ -48,7 +48,7 @@ export const useServiceComponentConfigGroupConfiguration = () => { const { selectedConfigId, onReset } = configurationsOptions; 
useEffect(() => { - if (cluster && service && component && serviceComponentConfigGroup && selectedConfigId) { + if (cluster?.id && service?.id && component?.id && serviceComponentConfigGroup?.id && selectedConfigId) { // load full config for selected configuration dispatch( getConfiguration({ @@ -63,7 +63,7 @@ export const useServiceComponentConfigGroupConfiguration = () => { }), ); } - }, [dispatch, cluster, service, component, serviceComponentConfigGroup, selectedConfigId]); + }, [dispatch, cluster?.id, service?.id, component?.id, serviceComponentConfigGroup?.id, selectedConfigId]); const selectedConfiguration = selectedConfigId === 0 ? configurationsOptions.draftConfiguration : loadedConfiguration; diff --git a/adcm-web/app/src/models/adcm/backendEvents.ts b/adcm-web/app/src/models/adcm/backendEvents.ts index aa7f32c738..be2388cca2 100644 --- a/adcm-web/app/src/models/adcm/backendEvents.ts +++ b/adcm-web/app/src/models/adcm/backendEvents.ts @@ -83,7 +83,7 @@ export type UpdateServiceEvent = { event: 'update_service'; object: { id: number; - changes: ServiceChanges; + changes: Partial; }; }; @@ -95,7 +95,7 @@ export type UpdateComponentEvent = { event: 'update_component'; object: { id: number; - changes: ComponentChanges; + changes: Partial; }; }; @@ -107,7 +107,7 @@ export type UpdateHostProviderEvent = { event: 'update_hostprovider'; object: { id: number; - changes: HostProviderChanges; + changes: Partial; }; }; From f68bb44fe52636bcd920a54640311b3f589910a3 Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Mon, 23 Sep 2024 18:31:19 +0300 Subject: [PATCH 73/98] ADCM-5973: remove bundle archive from downloads on error in prepare_bundle --- python/cm/bundle.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/cm/bundle.py b/python/cm/bundle.py index 5a7f543d4b..08b15e1e5e 100644 --- a/python/cm/bundle.py +++ b/python/cm/bundle.py @@ -103,6 +103,7 @@ def prepare_bundle( except Exception as error: shutil.rmtree(path, ignore_errors=True) + 
(settings.DOWNLOAD_DIR / Path(bundle_file).name).unlink() raise error From 1a90bd2be099bfdd1dac5e7f347479918cfae704 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 24 Sep 2024 07:23:39 +0000 Subject: [PATCH 74/98] ADCM-5971 Unify error on duplicated error on host group creation --- python/api_v2/generic/group_config/serializers.py | 6 ++++-- python/api_v2/tests/test_config.py | 8 ++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/python/api_v2/generic/group_config/serializers.py b/python/api_v2/generic/group_config/serializers.py index 368b0cb9ac..5040f97f09 100644 --- a/python/api_v2/generic/group_config/serializers.py +++ b/python/api_v2/generic/group_config/serializers.py @@ -10,6 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from cm.errors import AdcmEx from cm.models import GroupConfig, Host from django.contrib.contenttypes.models import ContentType from rest_framework.exceptions import ValidationError @@ -34,8 +35,9 @@ def validate_name(self, value): parent_content_type = ContentType.objects.get_for_model(model=object_) queryset = GroupConfig.objects.filter(name=value, object_type=parent_content_type, object_id=object_.pk) if queryset.exists(): - raise ValidationError( - f"Group config with name {value} already exists for {parent_content_type} {object_.name}" + raise AdcmEx( + code="CREATE_CONFLICT", + msg=f"Group config with name {value} already exists for {parent_content_type} {object_.name}", ) return value diff --git a/python/api_v2/tests/test_config.py b/python/api_v2/tests/test_config.py index 4292186ceb..369b30b6b8 100644 --- a/python/api_v2/tests/test_config.py +++ b/python/api_v2/tests/test_config.py @@ -529,13 +529,13 @@ def test_adcm_4894_duplicate_name_fail(self): response = self.client.v2[self.cluster_1, "config-groups"].post( data={"name": "group-config-new", "description": "group-config-new"} ) - self.assertEqual(response.status_code, 
HTTP_400_BAD_REQUEST) + self.assertEqual(response.status_code, HTTP_409_CONFLICT) self.assertDictEqual( response.json(), { - "code": "BAD_REQUEST", - "desc": f"name - Group config with name group-config-new " - f"already exists for cm | cluster {self.cluster_1.name};", + "code": "CREATE_CONFLICT", + "desc": f"Group config with name group-config-new " + f"already exists for cm | cluster {self.cluster_1.name}", "level": "error", }, ) From c1c956c64c326448b64fe4f0ef6e65e46cc6f1c6 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 24 Sep 2024 07:46:41 +0000 Subject: [PATCH 75/98] ADCM-5969 Rework task preparation for action/upgrade --- .../executors/delete_service.py | 4 +- python/api/job/serializers.py | 6 +- python/api_v2/generic/action/views.py | 10 +- python/api_v2/tests/test_actions.py | 1 + python/cm/services/bundle.py | 72 ++++++ python/cm/services/cluster.py | 15 +- python/cm/services/concern/checks.py | 14 +- python/cm/services/concern/flags.py | 18 -- python/cm/services/concern/repo.py | 85 ------- python/cm/services/job/_utils.py | 113 +++------ python/cm/services/job/action.py | 236 ++++++++++-------- python/cm/services/job/checks.py | 143 ----------- python/cm/services/job/constants.py | 15 ++ python/cm/services/job/inventory/_base.py | 9 +- python/cm/services/job/inventory/_groups.py | 35 ++- python/cm/services/job/jinja_scripts.py | 9 +- python/cm/services/job/prepare.py | 3 +- .../cm/services/job/run/_target_factories.py | 73 +++--- .../cm/services/job/run/_task_finalizers.py | 4 +- python/cm/services/job/run/repo.py | 6 +- python/cm/services/job/types.py | 23 +- python/cm/services/maintenance_mode.py | 2 +- python/cm/services/mapping.py | 55 +++- python/cm/services/service.py | 2 +- python/cm/tests/test_action_host_group.py | 6 +- python/cm/tests/test_hc.py | 167 +------------ python/cm/tests/test_inventory/base.py | 52 ++-- .../cm/tests/test_inventory/test_inventory.py | 17 +- python/cm/upgrade.py | 39 ++- python/core/cluster/operations.py | 15 
+- python/core/job/dto.py | 7 +- python/core/job/task.py | 3 +- 32 files changed, 511 insertions(+), 748 deletions(-) delete mode 100644 python/cm/services/concern/repo.py delete mode 100644 python/cm/services/job/checks.py create mode 100644 python/cm/services/job/constants.py diff --git a/python/ansible_plugin/executors/delete_service.py b/python/ansible_plugin/executors/delete_service.py index 18d2b0e505..216f8a2397 100644 --- a/python/ansible_plugin/executors/delete_service.py +++ b/python/ansible_plugin/executors/delete_service.py @@ -14,7 +14,7 @@ from cm.api import delete_service from cm.models import ClusterBind, ClusterObject, HostComponent, Prototype -from cm.services.mapping import change_host_component_mapping +from cm.services.mapping import change_host_component_mapping, check_nothing from core.cluster.types import HostComponentEntry from core.types import ADCMCoreType, CoreObjectDescriptor from django.db.transaction import atomic @@ -79,7 +79,7 @@ def __call__( .filter(cluster=service.cluster) .exclude(service=service) ), - skip_checks=True, + checks_func=check_nothing, ) # remove existing binds diff --git a/python/api/job/serializers.py b/python/api/job/serializers.py index 903ad1ef55..8fa62a657b 100644 --- a/python/api/job/serializers.py +++ b/python/api/job/serializers.py @@ -16,6 +16,7 @@ from ansible_plugin.utils import get_checklogs_data_by_job_id from cm.models import JobLog, JobStatus, LogStorage, TaskLog from cm.services.job.action import ActionRunPayload, run_action +from core.cluster.types import HostComponentEntry from django.conf import settings from rest_framework.reverse import reverse from rest_framework.serializers import ( @@ -137,7 +138,10 @@ def create(self, validated_data): payload=ActionRunPayload( conf=validated_data.get("config", {}), attr=validated_data.get("attr", {}), - hostcomponent=validated_data.get("hc", []), + hostcomponent={ + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry 
in validated_data.get("hc", ()) + }, verbose=validated_data.get("verbose", False), ), ) diff --git a/python/api_v2/generic/action/views.py b/python/api_v2/generic/action/views.py index 14f9401727..bad496deb6 100644 --- a/python/api_v2/generic/action/views.py +++ b/python/api_v2/generic/action/views.py @@ -25,6 +25,7 @@ from cm.services.config.jinja import get_jinja_config from cm.services.job.action import ActionRunPayload, run_action from cm.stack import check_hostcomponents_objects_exist +from core.cluster.types import HostComponentEntry from django.conf import settings from django.db.models import Q from django_filters.rest_framework.backends import DjangoFilterBackend @@ -47,8 +48,6 @@ filter_actions_by_user_perm, get_action_configuration, has_run_perms, - insert_service_ids, - unique_hc_entries, ) from api_v2.generic.config.utils import convert_adcm_meta_to_attr, represent_string_as_json_type from api_v2.task.serializers import TaskListSerializer @@ -191,9 +190,10 @@ def run(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG001, A payload=ActionRunPayload( conf=config, attr=attr, - hostcomponent=insert_service_ids( - hc_create_data=unique_hc_entries(serializer.validated_data["host_component_map"]) - ), + hostcomponent={ + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in serializer.validated_data["host_component_map"] + }, verbose=serializer.validated_data["is_verbose"], ), ) diff --git a/python/api_v2/tests/test_actions.py b/python/api_v2/tests/test_actions.py index 115a45ff76..23ef024a18 100644 --- a/python/api_v2/tests/test_actions.py +++ b/python/api_v2/tests/test_actions.py @@ -378,6 +378,7 @@ def test_adcm_4856_action_with_duplicated_hc_success(self) -> None: def test_adcm_4856_action_with_several_entries_hc_success(self) -> None: self.add_host_to_cluster(cluster=self.cluster, host=self.host_1) + self.add_host_to_cluster(cluster=self.cluster, host=self.host_2) allowed_action = 
Action.objects.filter(display_name="cluster_host_action_allowed").first() with RunTaskMock() as run_task: diff --git a/python/cm/services/bundle.py b/python/cm/services/bundle.py index 3d3e45f899..7a2960b7bb 100644 --- a/python/cm/services/bundle.py +++ b/python/cm/services/bundle.py @@ -11,10 +11,23 @@ # limitations under the License. from abc import ABC +from collections import defaultdict, deque from pathlib import Path +from core.concern.checks import parse_constraint +from core.concern.types import ( + BundleRestrictions, + ComponentNameKey, + ComponentRestrictionOwner, + MappingRestrictions, + ServiceDependencies, + ServiceRestrictionOwner, +) +from core.types import BundleID from django.conf import settings +from cm.models import ObjectType, Prototype + def detect_relative_path_to_bundle_root(source_file_dir: str | Path, raw_path: str) -> Path: """ @@ -86,3 +99,62 @@ def __init__(self, bundle_hash: str): class ADCMBundlePathResolver(PathResolver): def __init__(self): self._root = settings.BASE_DIR / "conf" / "adcm" + + +def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: + mapping_restrictions = MappingRestrictions( + constraints={}, required_components=defaultdict(deque), required_services=defaultdict(set), binds={} + ) + service_requires: ServiceDependencies = defaultdict(set) + + for component_name, service_name, constraint, requires, bound_to in ( + Prototype.objects.select_related("parent") + .values_list("name", "parent__name", "constraint", "requires", "bound_to") + .filter(bundle_id=bundle_id, type=ObjectType.COMPONENT) + ): + key = ComponentRestrictionOwner(service=service_name, component=component_name) + + for requirement in requires: + # Requires that have `component` specified aren't the same with only service specified + # (regardless of restriction source): + # - "service-only" require presence of service in cluster, + # so it's enough to "add" required service. 
+ # - ones with `component` key adds restriction on mapping, + # because such component should be mapped on at least one host. + # + # "service" requires from component are relative only to mapping checks, + # it doesn't affect service-related concerns. + required_service_name = requirement["service"] + if required_component_name := requirement.get("component"): + mapping_restrictions.required_components[key].append( + ComponentNameKey(component=required_component_name, service=required_service_name) + ) + else: + mapping_restrictions.required_services[key].add(required_service_name) + + constraint = parse_constraint(constraint) + if constraint.checks: + mapping_restrictions.constraints[key] = constraint + + if bound_to: + mapping_restrictions.binds[key] = ComponentNameKey( + component=bound_to["component"], service=bound_to["service"] + ) + + for service_name, requires in Prototype.objects.values_list("name", "requires").filter( + bundle_id=bundle_id, type=ObjectType.SERVICE + ): + if not requires: + continue + + key = ServiceRestrictionOwner(name=service_name) + + for requirement in requires: + required_service_name = requirement["service"] + service_requires[key].add(required_service_name) + if component_name := requirement.get("component"): + mapping_restrictions.required_components[key].append( + ComponentNameKey(component=component_name, service=required_service_name) + ) + + return BundleRestrictions(service_requires=service_requires, mapping=mapping_restrictions) diff --git a/python/cm/services/cluster.py b/python/cm/services/cluster.py index 74971d21dd..19c4a72b68 100644 --- a/python/cm/services/cluster.py +++ b/python/cm/services/cluster.py @@ -21,7 +21,7 @@ MaintenanceModeOfObjects, ObjectMaintenanceModeState, ) -from core.types import ADCMCoreType, ClusterID, CoreObjectDescriptor, HostID, MappingDict, ShortObjectInfo +from core.types import ADCMCoreType, ClusterID, CoreObjectDescriptor, HostID, ShortObjectInfo from django.db.transaction import atomic 
from rbac.models import re_apply_object_policy @@ -122,14 +122,19 @@ def perform_host_to_cluster_map( return hosts +def retrieve_host_component_entries(cluster_id: ClusterID) -> set[HostComponentEntry]: + return { + HostComponentEntry(**db_entry) + for db_entry in HostComponent.objects.values("host_id", "component_id").filter(cluster_id=cluster_id) + } + + def retrieve_cluster_topology(cluster_id: ClusterID) -> ClusterTopology: return next(retrieve_multiple_clusters_topology(cluster_ids=(cluster_id,))) -def retrieve_multiple_clusters_topology( - cluster_ids: Iterable[ClusterID], input_mapping: dict[ClusterID, list[MappingDict]] | None = None -) -> Generator[ClusterTopology, None, None]: - return build_clusters_topology(cluster_ids=cluster_ids, db=ClusterDB, input_mapping=input_mapping) +def retrieve_multiple_clusters_topology(cluster_ids: Iterable[ClusterID]) -> Generator[ClusterTopology, None, None]: + return build_clusters_topology(cluster_ids=cluster_ids, db=ClusterDB) def retrieve_related_cluster_topology(orm_object: Cluster | ClusterObject | ServiceComponent | Host) -> ClusterTopology: diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index 958a650dd0..abd5331752 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -16,7 +16,13 @@ from core.cluster.types import ClusterTopology from core.concern.checks import find_cluster_mapping_issues, find_unsatisfied_service_requirements -from core.concern.types import ComponentRestrictionOwner, ServiceDependencies, ServiceRestrictionOwner +from core.concern.types import ( + BundleRestrictions, + ComponentRestrictionOwner, + MappingRestrictions, + ServiceDependencies, + ServiceRestrictionOwner, +) from core.converters import named_mapping_from_topology from core.types import ClusterID, ConfigID, ObjectID from django.db.models import Q @@ -33,12 +39,8 @@ PrototypeImport, ServiceComponent, ) +from cm.services.bundle import 
retrieve_bundle_restrictions from cm.services.cluster import retrieve_cluster_topology -from cm.services.concern.repo import ( - BundleRestrictions, - MappingRestrictions, - retrieve_bundle_restrictions, -) from cm.services.config import retrieve_config_attr_pairs from cm.services.config.spec import FlatSpec, retrieve_flat_spec_for_objects diff --git a/python/cm/services/concern/flags.py b/python/cm/services/concern/flags.py index b6a7178e29..629ca061c3 100644 --- a/python/cm/services/concern/flags.py +++ b/python/cm/services/concern/flags.py @@ -23,8 +23,6 @@ from django.db.models import Q from cm.converters import core_type_to_model, model_name_to_core_type -from cm.hierarchy import Tree -from cm.issue import add_concern_to_object, remove_concern_from_object from cm.models import ADCMEntity, ConcernCause, ConcernItem, ConcernType from cm.services.concern.distribution import distribute_concern_on_related_objects from cm.services.concern.messages import ( @@ -131,22 +129,6 @@ def update_hierarchy_for_flag(flag: ConcernFlag, on_objects: Collection[CoreObje distribute_concern_on_related_objects(owner=owner, concern_id=concern.id) -def update_hierarchy(concern: ConcernItem) -> None: - tree = Tree(obj=concern.owner) - - related = set(concern.related_objects) - affected = {node.value for node in tree.get_directly_affected(node=tree.built_from)} - - if related == affected: - return - - for object_moved_out_hierarchy in related.difference(affected): - remove_concern_from_object(object_=object_moved_out_hierarchy, concern=concern) - - for new_object in affected.difference(related): - add_concern_to_object(object_=new_object, concern=concern) - - def _get_filter_for_flags_of_objects(content_type_id_map: dict[ContentType, set[int]]) -> Q: return Q(type=ConcernType.FLAG) & reduce( or_, diff --git a/python/cm/services/concern/repo.py b/python/cm/services/concern/repo.py deleted file mode 100644 index fae0a3576c..0000000000 --- a/python/cm/services/concern/repo.py +++ /dev/null 
@@ -1,85 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from collections import defaultdict, deque - -from core.concern.checks import parse_constraint -from core.concern.types import ( - BundleRestrictions, - ComponentNameKey, - ComponentRestrictionOwner, - MappingRestrictions, - ServiceDependencies, - ServiceRestrictionOwner, -) -from core.types import BundleID - -from cm.models import ObjectType, Prototype - - -def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: - mapping_restrictions = MappingRestrictions( - constraints={}, required_components=defaultdict(deque), required_services=defaultdict(set), binds={} - ) - service_requires: ServiceDependencies = defaultdict(set) - - for component_name, service_name, constraint, requires, bound_to in ( - Prototype.objects.select_related("parent") - .values_list("name", "parent__name", "constraint", "requires", "bound_to") - .filter(bundle_id=bundle_id, type=ObjectType.COMPONENT) - ): - key = ComponentRestrictionOwner(service=service_name, component=component_name) - - for requirement in requires: - # Requires that have `component` specified aren't the same with only service specified - # (regardless of restriction source): - # - "service-only" require presence of service in cluster, - # so it's enough to "add" required service. - # - ones with `component` key adds restriction on mapping, - # because such component should be mapped on at least one host. 
- # - # "service" requires from component are relative only to mapping checks, - # it doesn't affect service-related concerns. - required_service_name = requirement["service"] - if required_component_name := requirement.get("component"): - mapping_restrictions.required_components[key].append( - ComponentNameKey(component=required_component_name, service=required_service_name) - ) - else: - mapping_restrictions.required_services[key].add(required_service_name) - - constraint = parse_constraint(constraint) - if constraint.checks: - mapping_restrictions.constraints[key] = constraint - - if bound_to: - mapping_restrictions.binds[key] = ComponentNameKey( - component=bound_to["component"], service=bound_to["service"] - ) - - for service_name, requires in Prototype.objects.values_list("name", "requires").filter( - bundle_id=bundle_id, type=ObjectType.SERVICE - ): - if not requires: - continue - - key = ServiceRestrictionOwner(name=service_name) - - for requirement in requires: - required_service_name = requirement["service"] - service_requires[key].add(required_service_name) - if component_name := requirement.get("component"): - mapping_restrictions.required_components[key].append( - ComponentNameKey(component=component_name, service=required_service_name) - ) - - return BundleRestrictions(service_requires=service_requires, mapping=mapping_restrictions) diff --git a/python/cm/services/job/_utils.py b/python/cm/services/job/_utils.py index 8e1106812c..a21411e9c0 100644 --- a/python/cm/services/job/_utils.py +++ b/python/cm/services/job/_utils.py @@ -10,96 +10,53 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Hashable -from cm.errors import AdcmEx -from cm.models import Action, Cluster, ClusterObject, Host, HostComponent, ServiceComponent -from cm.services.job.types import HcAclAction - - -def get_old_hc(saved_hostcomponent: list[dict]): - if not saved_hostcomponent: - return {} - - old_hostcomponent = {} - for hostcomponent in saved_hostcomponent: - service = ClusterObject.objects.get(id=hostcomponent["service_id"]) - comp = ServiceComponent.objects.get(id=hostcomponent["component_id"]) - host = Host.objects.get(id=hostcomponent["host_id"]) - key = _cook_comp_key(service.prototype.name, comp.prototype.name) - _add_to_dict(old_hostcomponent, key, host.fqdn, host) - - return old_hostcomponent +from core.cluster.types import ClusterTopology, TopologyHostDiff +from cm.errors import AdcmEx +from cm.services.job.types import ActionHCRule, TaskMappingDelta -def cook_delta( - cluster: Cluster, - new_hc: list[tuple[ClusterObject, Host, ServiceComponent]], - action_hc: list[dict], - old: dict = None, -) -> dict: - def add_delta(_delta, action, _key, fqdn, _host): - _service, _comp = _key.split(".") - if not _check_action_hc(action_hc, _service, _comp, action): - msg = ( - f'no permission to "{action}" component "{_comp}" of ' f'service "{_service}" to/from hostcomponentmap' - ) - raise AdcmEx(code="WRONG_ACTION_HC", msg=msg) - - _add_to_dict(_delta[action], _key, fqdn, _host) - new = {} - for service, host, comp in new_hc: - key = _cook_comp_key(service.prototype.name, comp.prototype.name) - _add_to_dict(new, key, host.fqdn, host) +def construct_delta_for_task(topology: ClusterTopology, host_difference: TopologyHostDiff) -> TaskMappingDelta: + delta = TaskMappingDelta() - if old is None: - old = {} - for hostcomponent in HostComponent.objects.filter(cluster=cluster): - key = _cook_comp_key(hostcomponent.service.prototype.name, hostcomponent.component.prototype.name) - _add_to_dict(old, key, hostcomponent.host.fqdn, hostcomponent.host) + if not 
(host_difference.mapped or host_difference.unmapped): + return delta - delta = {HcAclAction.ADD.value: {}, HcAclAction.REMOVE.value: {}} - for key, value in new.items(): - if key in old: - for host in value: - if host not in old[key]: - add_delta(_delta=delta, action=HcAclAction.ADD.value, _key=key, fqdn=host, _host=value[host]) + component_keys = { + component_id: f"{service_topology.info.name}.{component_topology.info.name}" + for service_id, service_topology in topology.services.items() + for component_id, component_topology in service_topology.components.items() + } - for host in old[key]: - if host not in value: - add_delta(_delta=delta, action=HcAclAction.REMOVE.value, _key=key, fqdn=host, _host=old[key][host]) - else: - for host in value: - add_delta(_delta=delta, action=HcAclAction.ADD.value, _key=key, fqdn=host, _host=value[host]) + for component_id, added_hosts in host_difference.mapped.components.items(): + key = component_keys[component_id] + delta.add[key] = {topology.hosts[host_id] for host_id in added_hosts} - for key, value in old.items(): - if key not in new: - for host in value: - add_delta(_delta=delta, action=HcAclAction.REMOVE.value, _key=key, fqdn=host, _host=value[host]) + for component_id, removed_hosts in host_difference.unmapped.components.items(): + key = component_keys[component_id] + delta.remove[key] = {topology.hosts[host_id] for host_id in removed_hosts} return delta -def _add_to_dict(my_dict: dict, key: Hashable, subkey: Hashable, value: Any) -> None: - if key not in my_dict: - my_dict[key] = {} - - my_dict[key][subkey] = value - - -def _cook_comp_key(name, subname): - return f"{name}.{subname}" +def check_delta_is_allowed(delta: TaskMappingDelta, rules: list[ActionHCRule]) -> None: + if not rules: + return + allowed = {"add": set(), "remove": set()} + for rule in rules: + component_key = f"{rule['service']}.{rule['component']}" + allowed[rule["action"]].add(component_key) -def _check_action_hc( - action_hc: list[dict], - service: 
ClusterObject, - component: ServiceComponent, - action: Action, -) -> bool: - for item in action_hc: - if item["service"] == service and item["component"] == component and item["action"] == action: - return True + disallowed_add = set(delta.add.keys()).difference(allowed["add"]) + if disallowed_add: + disallowed = next(iter(disallowed_add)) + message = f'no permission to "add" component {disallowed} to cluster mapping' + raise AdcmEx(code="WRONG_ACTION_HC", msg=message) - return False + disallowed_remove = set(delta.remove.keys()).difference(allowed["remove"]) + if disallowed_remove: + disallowed = next(iter(disallowed_remove)) + message = f'no permission to "remove" component {disallowed} from cluster mapping' + raise AdcmEx(code="WRONG_ACTION_HC", msg=message) diff --git a/python/cm/services/job/action.py b/python/cm/services/job/action.py index a0f05a15f1..506af79a21 100644 --- a/python/cm/services/job/action.py +++ b/python/cm/services/job/action.py @@ -11,20 +11,20 @@ # limitations under the License. 
from dataclasses import dataclass, field -from functools import partial -from typing import TypeAlias +from typing import Iterable, TypeAlias -from core.cluster.types import HostComponentEntry +from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference +from core.cluster.types import ClusterTopology, HostComponentEntry from core.job.dto import TaskPayloadDTO -from core.types import ActionTargetDescriptor, CoreObjectDescriptor +from core.types import ActionTargetDescriptor, BundleID, CoreObjectDescriptor, HostID from django.conf import settings -from django.db.transaction import atomic, on_commit +from django.db.transaction import atomic from rbac.roles import re_apply_policy_for_jobs +from rest_framework.status import HTTP_409_CONFLICT from cm.adcm_config.checks import check_attr from cm.adcm_config.config import check_config_spec, get_prototype_config, process_config_spec, process_file_type -from cm.api import get_hc -from cm.converters import model_name_to_core_type, orm_object_to_action_target_type, orm_object_to_core_type +from cm.converters import orm_object_to_action_target_type, orm_object_to_core_type from cm.errors import AdcmEx from cm.models import ( ADCM, @@ -40,18 +40,17 @@ ServiceComponent, TaskLog, ) +from cm.services.bundle import retrieve_bundle_restrictions from cm.services.cluster import retrieve_cluster_topology -from cm.services.concern.repo import retrieve_bundle_restrictions +from cm.services.concern.checks import check_mapping_restrictions from cm.services.config.spec import convert_to_flat_spec_from_proto_flat_spec -from cm.services.job.checks import ( - HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, - check_hostcomponentmap, - check_mapping_restrictions, -) +from cm.services.job._utils import check_delta_is_allowed, construct_delta_for_task +from cm.services.job.constants import HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE from cm.services.job.inventory._config import update_configuration_for_inventory_inplace 
from cm.services.job.prepare import prepare_task_for_action from cm.services.job.run import run_task -from cm.services.mapping import change_host_component_mapping +from cm.services.job.types import ActionHCRule, TaskMappingDelta +from cm.services.mapping import change_host_component_mapping, check_no_host_in_mm, check_nothing from cm.status_api import send_task_status_update_event from cm.variant import process_variant @@ -63,13 +62,29 @@ class ActionRunPayload: conf: dict = field(default_factory=dict) attr: dict = field(default_factory=dict) - hostcomponent: list[dict] = field(default_factory=list) + hostcomponent: set[HostComponentEntry] = field(default_factory=set) verbose: bool = False -def run_action(action: Action, obj: ActionTarget, payload: ActionRunPayload) -> TaskLog: +def run_action( + action: Action, obj: ActionTarget, payload: ActionRunPayload, post_upgrade_hc: list[dict] | None = None +) -> TaskLog: + task_payload = TaskPayloadDTO( + conf=payload.conf, + attr=payload.attr, + verbose=payload.verbose, + hostcomponent=None, + post_upgrade_hostcomponent=post_upgrade_hc, + ) + action_objects = _ActionLaunchObjects(target=obj, action=action) + is_upgrade_action = hasattr(action, "upgrade") + action_has_hc_acl = bool(action.hostcomponentmap) + + if action_has_hc_acl and not action_objects.cluster: + raise AdcmEx(code="TASK_ERROR", msg="Only cluster objects can have action with hostcomponentmap") + _check_no_target_conflict(target=action_objects.target, action=action) _check_no_blocking_concerns(lock_owner=action_objects.object_to_lock, action_name=action.name) _check_action_is_available_for_object(owner=action_objects.owner, action=action) @@ -77,46 +92,63 @@ def run_action(action: Action, obj: ActionTarget, payload: ActionRunPayload) -> spec, flat_spec = _process_run_config( action=action, owner=action_objects.owner, conf=payload.conf, attr=payload.attr ) - host_map, post_upgrade_hc, delta, is_upgrade_action = _process_hostcomponent( - 
cluster=action_objects.cluster, action=action, new_hostcomponent=payload.hostcomponent - ) - with atomic(): - task = prepare_task_for_action( - target=ActionTargetDescriptor( - id=action_objects.target.id, type=orm_object_to_action_target_type(action_objects.target) - ), - owner=CoreObjectDescriptor(id=action_objects.owner.id, type=orm_object_to_core_type(action_objects.owner)), - action=action.pk, - payload=TaskPayloadDTO( - conf=payload.conf, - attr=payload.attr, - verbose=payload.verbose, - hostcomponent=get_hc(cluster=action_objects.cluster), - post_upgrade_hostcomponent=post_upgrade_hc, - ), - delta=delta, + delta = TaskMappingDelta() + if action_objects.cluster and (action_has_hc_acl or is_upgrade_action): + topology = retrieve_cluster_topology(cluster_id=action_objects.cluster.id) + delta = _check_hostcomponent_and_get_delta( + bundle_id=int(action.prototype.bundle_id), + topology=topology, + hc_payload=payload.hostcomponent, + hc_rules=action.hostcomponentmap, + mapping_restriction_err_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE if is_upgrade_action else "{}", ) + if action_has_hc_acl: + # current topology should be saved + task_payload.hostcomponent = { + HostComponentEntry(host_id=host_id, component_id=component_id) + for service in topology.services.values() + for component_id, component in service.components.items() + for host_id in component.hosts + } - on_commit(func=partial(send_task_status_update_event, task_id=task.id, status=JobStatus.CREATED.value)) - - task_ = TaskLog.objects.get(id=task.id) - _finish_task_preparation( - task=task_, - owner=action_objects.owner, - cluster=action_objects.cluster, - host_map=host_map, - is_upgrade_action=is_upgrade_action, - payload=payload, - spec=spec, - flat_spec=flat_spec, + with atomic(): + owner = CoreObjectDescriptor(id=action_objects.owner.id, type=orm_object_to_core_type(action_objects.owner)) + target = ActionTargetDescriptor( + id=action_objects.target.id, 
type=orm_object_to_action_target_type(action_objects.target) ) + task = prepare_task_for_action(target=target, owner=owner, action=action.id, payload=task_payload, delta=delta) + + orm_task = TaskLog.objects.get(id=task.id) + + # Original check: `if host_map or (is_upgrade_action and host_map is not None)`. + # I believe second condition is the same as "is cluster action with hc" + if action_objects.cluster and (payload.hostcomponent or (is_upgrade_action and action_has_hc_acl)): + change_host_component_mapping( + cluster_id=action_objects.cluster.id, + bundle_id=int(action_objects.cluster.prototype.bundle_id), + flat_mapping=payload.hostcomponent, + checks_func=check_nothing, + ) + + if payload.conf: + new_conf = update_configuration_for_inventory_inplace( + configuration=payload.conf, + attributes=payload.attr, + specification=convert_to_flat_spec_from_proto_flat_spec(prototypes_flat_spec=flat_spec), + config_owner=owner, + ) + process_file_type(obj=orm_task, spec=spec, conf=payload.conf) + orm_task.config = new_conf + orm_task.save(update_fields=["config"]) - re_apply_policy_for_jobs(action_object=action_objects.owner, task=task_) + re_apply_policy_for_jobs(action_object=action_objects.owner, task=orm_task) - run_task(task_) + run_task(orm_task) - return task_ + send_task_status_update_event(task_id=task.id, status=JobStatus.CREATED.value) + + return orm_task class _ActionLaunchObjects: @@ -225,67 +257,57 @@ def _process_run_config(action: Action, owner: ObjectWithAction, conf: dict, att return spec, flat_spec -def _process_hostcomponent( - cluster: Cluster | None, action: Action, new_hostcomponent: list[dict] -) -> tuple[list[tuple[ClusterObject, Host, ServiceComponent]] | None, list, dict[str, dict], bool]: - is_upgrade_action = hasattr(action, "upgrade") +def _check_hostcomponent_and_get_delta( + bundle_id: BundleID, + topology: ClusterTopology, + hc_payload: set[HostComponentEntry], + hc_rules: list[ActionHCRule], + mapping_restriction_err_template: str, +) 
-> TaskMappingDelta | None: + existing_hosts = set(topology.hosts) + existing_components = set(topology.component_ids) - if not cluster: - if not new_hostcomponent: - return None, [], {}, is_upgrade_action + for entry in hc_payload: + if entry.host_id not in existing_hosts: + raise AdcmEx(code="FOREIGN_HOST", http_code=HTTP_409_CONFLICT) - # Don't think it's even required check on action preparation, - # should be handled one level above - raise AdcmEx(code="TASK_ERROR", msg="Only cluster objects can have action with hostcomponentmap") + if entry.component_id not in existing_components: + raise AdcmEx(code="COMPONENT_NOT_FOUND", http_code=HTTP_409_CONFLICT) - # `check_hostcomponentmap` won't run checks in these conditions, because it's checking actions with `hc_acl`. - # But this code checks whether existing hostcomponent satisfies constraints from new bundle. - if is_upgrade_action and not action.hostcomponentmap: - topology = retrieve_cluster_topology(cluster_id=cluster.id) - bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(action.upgrade.bundle_id)) + with_hc_acl = bool(hc_rules) + # if there aren't hc_acl rules, then `payload.hostcomponent` is irrelevant + new_topology = ( + create_topology_with_new_mapping(topology=topology, new_mapping=hc_payload) if with_hc_acl else topology + ) - check_mapping_restrictions( - mapping_restrictions=bundle_restrictions.mapping, - topology=topology, - error_message_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, - ) + bundle_restrictions = retrieve_bundle_restrictions(bundle_id=bundle_id) + check_mapping_restrictions( + mapping_restrictions=bundle_restrictions.mapping, + topology=new_topology, + error_message_template=mapping_restriction_err_template, + ) - return None, [], {}, is_upgrade_action - - host_map, post_upgrade_hc, delta = check_hostcomponentmap(cluster=cluster, action=action, new_hc=new_hostcomponent) - - return host_map, post_upgrade_hc, delta, is_upgrade_action - - -def 
_finish_task_preparation( - task: TaskLog, - owner: ObjectWithAction, - cluster: Cluster | None, - host_map: list[tuple[ClusterObject, Host, ServiceComponent]] | None, - is_upgrade_action: bool, - payload: ActionRunPayload, - spec: dict, - flat_spec: dict, -): - if host_map or (is_upgrade_action and host_map is not None): - change_host_component_mapping( - cluster_id=cluster.id, - bundle_id=cluster.prototype.bundle_id, - flat_mapping=( - HostComponentEntry(host_id=host.id, component_id=component.id) for (_, host, component) in host_map - ), - skip_checks=True, - ) + host_difference = find_hosts_difference(new_topology=new_topology, old_topology=topology) + check_no_host_in_mm(host_difference.mapped.all) + # some of newly mapped hosts may have concerns + _check_no_blocking_concerns_on_hosts(host_difference.mapped.all) - if payload.conf: - new_conf = update_configuration_for_inventory_inplace( - configuration=payload.conf, - attributes=payload.attr, - specification=convert_to_flat_spec_from_proto_flat_spec(prototypes_flat_spec=flat_spec), - config_owner=CoreObjectDescriptor( - id=owner.pk, type=model_name_to_core_type(model_name=owner._meta.model_name) - ), - ) - process_file_type(obj=task, spec=spec, conf=payload.conf) - task.config = new_conf - task.save(update_fields=["config"]) + if with_hc_acl: + delta = construct_delta_for_task(topology=new_topology, host_difference=host_difference) + check_delta_is_allowed(delta=delta, rules=hc_rules) + return delta + + return None + + +def _check_no_blocking_concerns_on_hosts(hosts: Iterable[HostID]) -> None: + # this function should be a generic function like "retrieve_concerns_from_objects", + # but exact use cases (=> API) aren't clear now, so implementation is put out for later. 
+ hosts_with_concerns = tuple( + Host.concerns.through.objects.filter(host_id__in=hosts, concernitem__blocking=True) + .values_list("host_id", flat=True) + .distinct() + ) + if hosts_with_concerns: + host_names = ",".join(sorted(Host.objects.filter(id__in=hosts_with_concerns).values_list("fqdn", flat=True))) + raise AdcmEx(code="ISSUE_INTEGRITY_ERROR", msg=f"Hosts are locked or have issues: {host_names}") diff --git a/python/cm/services/job/checks.py b/python/cm/services/job/checks.py deleted file mode 100644 index 74a3533051..0000000000 --- a/python/cm/services/job/checks.py +++ /dev/null @@ -1,143 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import copy - -from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference -from core.cluster.types import ClusterTopology, HostComponentEntry -from rest_framework.status import HTTP_409_CONFLICT - -from cm.errors import AdcmEx -from cm.models import ( - Action, - Cluster, - ClusterObject, - ConcernType, - Host, - Prototype, - ServiceComponent, -) -from cm.services.cluster import retrieve_cluster_topology -from cm.services.concern.checks import check_mapping_restrictions -from cm.services.concern.repo import retrieve_bundle_restrictions -from cm.services.job._utils import cook_delta, get_old_hc -from cm.services.job.types import HcAclAction -from cm.services.mapping import check_no_host_in_mm - -HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE = ( - "Host-component map of upgraded cluster should satisfy constraints of new bundle. Now error is: {}" -) - - -def check_hostcomponentmap( - cluster: Cluster | None, action: Action, new_hc: list[dict] -) -> tuple[list[tuple[ClusterObject, Host, ServiceComponent]] | None, list, dict[str, dict]]: - from cm.api import check_sub_key, get_hc, make_host_comp_list - - if not action.hostcomponentmap: - return None, [], {} - - if not new_hc: - raise AdcmEx(code="TASK_ERROR", msg="hc is required") - - if not cluster: - raise AdcmEx(code="TASK_ERROR", msg="Only cluster objects can have action with hostcomponentmap") - - if not hasattr(action, "upgrade"): - for host_comp in new_hc: - host = Host.obj.get(id=host_comp.get("host_id", 0)) - if host.concerns.filter(type=ConcernType.LOCK).exists(): - raise AdcmEx(code="LOCK_ERROR", msg=f"object {host} is locked") - - if host.concerns.filter(type=ConcernType.ISSUE).exists(): - raise AdcmEx(code="ISSUE_INTEGRITY_ERROR", msg=f"object {host} has issues") - - post_upgrade_hc, clear_hc = _check_upgrade_hc(action=action, new_hc=new_hc) - check_sub_key(hc_in=clear_hc) - - old_hc = get_old_hc(saved_hostcomponent=get_hc(cluster=cluster)) - new_entries = tuple( - 
HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) for entry in clear_hc - ) - - # todo most likely this topology should be created somewhere above and passed in here as argument - topology = retrieve_cluster_topology(cluster_id=cluster.id) - _check_entries_are_related_to_topology(topology=topology, entries=new_entries) - new_topology = create_topology_with_new_mapping( - topology=topology, - new_mapping=( - HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) for entry in clear_hc - ), - ) - host_difference = find_hosts_difference(new_topology=new_topology, old_topology=topology) - check_no_host_in_mm(host_difference.mapped.all) - - if not hasattr(action, "upgrade"): - bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(cluster.prototype.bundle_id)) - check_mapping_restrictions(mapping_restrictions=bundle_restrictions.mapping, topology=new_topology) - - else: - bundle_restrictions = retrieve_bundle_restrictions(bundle_id=int(action.upgrade.bundle_id)) - check_mapping_restrictions( - mapping_restrictions=bundle_restrictions.mapping, - topology=new_topology, - error_message_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE, - ) - - prepared_hc_list = make_host_comp_list(cluster=cluster, hc_in=clear_hc) - - delta = cook_delta(cluster=cluster, new_hc=prepared_hc_list, action_hc=action.hostcomponentmap, old=old_hc) - - return prepared_hc_list, post_upgrade_hc, delta - - -def _check_entries_are_related_to_topology(topology: ClusterTopology, entries: tuple[HostComponentEntry, ...]) -> None: - if not {entry.host_id for entry in entries}.issubset(topology.hosts): - raise AdcmEx(code="FOREIGN_HOST", http_code=HTTP_409_CONFLICT) - - if not {entry.component_id for entry in entries}.issubset(topology.component_ids): - raise AdcmEx(code="COMPONENT_NOT_FOUND", http_code=HTTP_409_CONFLICT) - - -def _check_upgrade_hc(action, new_hc): - post_upgrade_hc = [] - clear_hc = copy.deepcopy(new_hc) - buff = 0 - for 
host_comp in new_hc: - if "component_prototype_id" in host_comp: - if not hasattr(action, "upgrade"): - raise AdcmEx( - code="WRONG_ACTION_HC", - msg="Hc map with components prototype available only in upgrade action", - ) - - proto = Prototype.obj.get( - type="component", - id=host_comp["component_prototype_id"], - bundle=action.upgrade.bundle, - ) - for hc_acl in action.hostcomponentmap: - if proto.name == hc_acl["component"]: - buff += 1 - if hc_acl["action"] != HcAclAction.ADD.value: - raise AdcmEx( - code="WRONG_ACTION_HC", - msg="New components from bundle with upgrade you can only add, not remove", - ) - - if buff == 0: - raise AdcmEx(code="INVALID_INPUT", msg="hc_acl doesn't allow actions with this component") - - post_upgrade_hc.append(host_comp) - clear_hc.remove(host_comp) - - return post_upgrade_hc, clear_hc diff --git a/python/cm/services/job/constants.py b/python/cm/services/job/constants.py new file mode 100644 index 0000000000..d69db7576c --- /dev/null +++ b/python/cm/services/job/constants.py @@ -0,0 +1,15 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE = ( + "Host-component map of upgraded cluster should satisfy constraints of new bundle. 
" "Now error is: {}" +) diff --git a/python/cm/services/job/inventory/_base.py b/python/cm/services/job/inventory/_base.py index a82c02a36f..e4f81e51b2 100644 --- a/python/cm/services/job/inventory/_base.py +++ b/python/cm/services/job/inventory/_base.py @@ -63,12 +63,13 @@ ObjectsInInventoryMap, ServiceNode, ) +from cm.services.job.types import TaskMappingDelta def get_inventory_data( target: ActionTargetDescriptor, is_host_action: bool, - delta: dict | None = None, + delta: TaskMappingDelta | None = None, related_objects: RelatedObjects | None = None, ) -> dict: if target.type == ExtraActionTargetType.ACTION_HOST_GROUP: @@ -80,7 +81,7 @@ def get_inventory_data( # but it's inadequate situation and in "context of action target group" such mutations aren't expected. return _get_inventory_for_action_from_cluster_bundle( cluster_id=group.object.id if isinstance(group.object, Cluster) else group.object.cluster_id, - delta=delta or {}, + delta=delta or TaskMappingDelta(), target_hosts=tuple((host.pk, host.fqdn) for host in group.hosts.all()), ) @@ -127,7 +128,7 @@ def get_inventory_data( raise RuntimeError(message) return _get_inventory_for_action_from_cluster_bundle( - cluster_id=cluster_id, delta=delta or {}, target_hosts=target_hosts + cluster_id=cluster_id, delta=delta or TaskMappingDelta(), target_hosts=target_hosts ) @@ -154,7 +155,7 @@ def get_cluster_vars(topology: ClusterTopology) -> ClusterVars: def _get_inventory_for_action_from_cluster_bundle( - cluster_id: int, delta: dict, target_hosts: Iterable[tuple[HostID, HostName]] + cluster_id: int, delta: TaskMappingDelta, target_hosts: Iterable[tuple[HostID, HostName]] ) -> dict: host_groups: dict[HostGroupName, set[tuple[HostID, HostName]]] = {} diff --git a/python/cm/services/job/inventory/_groups.py b/python/cm/services/job/inventory/_groups.py index e79841131c..d12f7e70e0 100644 --- a/python/cm/services/job/inventory/_groups.py +++ b/python/cm/services/job/inventory/_groups.py @@ -11,17 +11,17 @@ # limitations 
under the License. from collections import defaultdict -from typing import Literal from core.cluster.types import ClusterTopology from core.types import HostID, HostName from cm.services.job.inventory._constants import MAINTENANCE_MODE_GROUP_SUFFIX from cm.services.job.inventory._types import HostGroupName +from cm.services.job.types import TaskMappingDelta def detect_host_groups_for_cluster_bundle_action( - cluster_topology: ClusterTopology, hosts_in_maintenance_mode: set[int], hc_delta: dict + cluster_topology: ClusterTopology, hosts_in_maintenance_mode: set[int], hc_delta: TaskMappingDelta ) -> dict[HostGroupName, set[tuple[HostID, HostName]]]: groups = defaultdict(set) @@ -51,26 +51,23 @@ def detect_host_groups_for_cluster_bundle_action( groups[f"{service_name}.{component_name}.{MAINTENANCE_MODE_GROUP_SUFFIX}"] = hosts_in_mm groups[f"{service_name}.{MAINTENANCE_MODE_GROUP_SUFFIX}"] |= hosts_in_mm - if not hc_delta: + if hc_delta.is_empty: return groups - for hc_acl_action, delta_groups in hc_delta.items(): - hc_acl_action: Literal["add", "remove"] - for host_group_prefix, hosts_in_group in delta_groups.items(): - host_group_prefix: HostGroupName - group_full_name = f"{host_group_prefix}.{hc_acl_action}" + for component_key, hosts in hc_delta.add.items(): + group_full_name = f"{component_key}.add" + hosts_not_in_mm = {(host.id, host.name) for host in hosts if host.id not in hosts_in_maintenance_mode} + if hosts_not_in_mm: + groups[group_full_name] = hosts_not_in_mm - hosts_not_in_mm = { - (host.pk, host.fqdn) for host in hosts_in_group.values() if host.pk not in hosts_in_maintenance_mode - } - if hosts_not_in_mm: - groups[group_full_name] = hosts_not_in_mm + for component_key, hosts in hc_delta.remove.items(): + group_full_name = f"{component_key}.remove" + hosts_not_in_mm = {(host.id, host.name) for host in hosts if host.id not in hosts_in_maintenance_mode} + if hosts_not_in_mm: + groups[group_full_name] = hosts_not_in_mm - if hc_acl_action == "remove": - 
hosts_in_mm = { - (host.pk, host.fqdn) for host in hosts_in_group.values() if host.pk in hosts_in_maintenance_mode - } - if hosts_in_mm: - groups[f"{group_full_name}.{MAINTENANCE_MODE_GROUP_SUFFIX}"] = hosts_in_mm + hosts_in_mm = {(host.id, host.name) for host in hosts if host.id in hosts_in_maintenance_mode} + if hosts_in_mm: + groups[f"{group_full_name}.{MAINTENANCE_MODE_GROUP_SUFFIX}"] = hosts_in_mm return groups diff --git a/python/cm/services/job/jinja_scripts.py b/python/cm/services/job/jinja_scripts.py index 4af6879988..69c37a6059 100755 --- a/python/cm/services/job/jinja_scripts.py +++ b/python/cm/services/job/jinja_scripts.py @@ -34,6 +34,7 @@ get_cluster_vars, ) from cm.services.job.inventory._types import HostGroupName +from cm.services.job.types import TaskMappingDelta from cm.services.template import TemplateBuilder from cm.utils import get_on_fail_states @@ -56,7 +57,7 @@ class JinjaScriptsEnvironment(TypedDict): action: ActionContext -def get_env(task: TaskLog, delta: dict | None = None) -> JinjaScriptsEnvironment: +def get_env(task: TaskLog, delta: TaskMappingDelta | None = None) -> JinjaScriptsEnvironment: action_group = None target_object = task.task_object if isinstance(target_object, ActionHostGroup): @@ -74,7 +75,9 @@ def get_env(task: TaskLog, delta: dict | None = None) -> JinjaScriptsEnvironment ) host_groups = _get_host_group_names_only( host_groups=detect_host_groups_for_cluster_bundle_action( - cluster_topology=cluster_topology, hosts_in_maintenance_mode=hosts_in_maintenance_mode, hc_delta=delta + cluster_topology=cluster_topology, + hosts_in_maintenance_mode=hosts_in_maintenance_mode, + hc_delta=delta or TaskMappingDelta(), ) ) if action_group: @@ -98,7 +101,7 @@ def get_env(task: TaskLog, delta: dict | None = None) -> JinjaScriptsEnvironment ) -def get_job_specs_from_template(task_id: TaskID, delta: dict | None) -> Generator[JobSpec, None, None]: +def get_job_specs_from_template(task_id: TaskID, delta: TaskMappingDelta | None) -> 
Generator[JobSpec, None, None]: task = TaskLog.objects.select_related("action", "action__prototype__bundle").get(pk=task_id) path_resolver = BundlePathResolver(bundle_hash=task.action.prototype.bundle.hash) diff --git a/python/cm/services/job/prepare.py b/python/cm/services/job/prepare.py index 58f1f173aa..efe8e0eeb5 100644 --- a/python/cm/services/job/prepare.py +++ b/python/cm/services/job/prepare.py @@ -16,6 +16,7 @@ from core.types import ActionID, ActionTargetDescriptor, CoreObjectDescriptor from cm.services.job.run.repo import ActionRepoImpl, JobRepoImpl +from cm.services.job.types import TaskMappingDelta def prepare_task_for_action( @@ -23,7 +24,7 @@ def prepare_task_for_action( owner: CoreObjectDescriptor, action: ActionID, payload: TaskPayloadDTO, - delta: dict | None = None, + delta: TaskMappingDelta | None = None, ) -> Task: return compose_task( target=target, diff --git a/python/cm/services/job/run/_target_factories.py b/python/cm/services/job/run/_target_factories.py index 51d0834e0c..0c14570d9f 100644 --- a/python/cm/services/job/run/_target_factories.py +++ b/python/cm/services/job/run/_target_factories.py @@ -17,6 +17,7 @@ import json from ansible_plugin.utils import finish_check +from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference from core.cluster.types import HostComponentEntry from core.job.executors import BundleExecutorConfig, ExecutorConfig from core.job.runners import ExecutionTarget, ExternalSettings @@ -26,18 +27,16 @@ from django.db.transaction import atomic from rbac.roles import re_apply_policy_for_jobs -from cm.api import get_hc from cm.models import ( AnsibleConfig, Cluster, - HostComponent, LogStorage, - Prototype, ServiceComponent, TaskLog, ) -from cm.services.job._utils import cook_delta, get_old_hc -from cm.services.job.checks import check_hostcomponentmap +from cm.services.cluster import retrieve_cluster_topology, retrieve_host_component_entries +from cm.services.job._utils import 
construct_delta_for_task +from cm.services.job.constants import HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE from cm.services.job.inventory import get_adcm_configuration, get_inventory_data from cm.services.job.run.executors import ( AnsibleExecutorConfig, @@ -55,7 +54,7 @@ JobEnv, ServiceActionType, ) -from cm.services.mapping import change_host_component_mapping +from cm.services.mapping import change_host_component_mapping, check_only_mapping from cm.status_api import send_prototype_and_state_update_event from cm.utils import deep_merge @@ -172,32 +171,33 @@ def _switch_hc_if_required(task: TaskLog): return cluster = task.task_object - old_hc = get_hc(cluster) - new_hc = [] - for hostcomponent in [*(task.post_upgrade_hc_map or ()), *(old_hc or ())]: - if hostcomponent not in new_hc: - new_hc.append(hostcomponent) - task.hostcomponentmap = old_hc + # `post_upgrade_hc_map` contains records with "component_prototype_id" which are "extra" to regular hc + newly_added_entries = set() + for new_entry in task.post_upgrade_hc_map or (): + if "component_prototype_id" in new_entry: + # if optimized to 1 request, it's probably good to filter by prototype__type="component" + component_id = ServiceComponent.objects.values_list("id", flat=True).get( + cluster=cluster, prototype_id=new_entry["component_prototype_id"] + ) + newly_added_entries.add(HostComponentEntry(component_id=component_id, host_id=new_entry["host_id"])) + + current_topology_entries = retrieve_host_component_entries(cluster_id=cluster.id) + + task.hostcomponentmap = [ + {"host_id": entry.host_id, "component_id": entry.component_id} for entry in current_topology_entries + ] task.post_upgrade_hc_map = None task.save(update_fields=["hostcomponentmap", "post_upgrade_hc_map"]) - for hostcomponent in new_hc: - if "component_prototype_id" in hostcomponent: - proto = Prototype.objects.get(type="component", id=hostcomponent.pop("component_prototype_id")) - comp = ServiceComponent.objects.get(cluster=cluster, 
prototype=proto) - hostcomponent["component_id"] = comp.id - hostcomponent["service_id"] = comp.service.id + after_upgrade_hostcomponent = current_topology_entries | newly_added_entries - host_map, *_ = check_hostcomponentmap(cluster, task.action, new_hc) - if host_map is not None: + if task.action.hostcomponentmap: change_host_component_mapping( cluster_id=cluster.id, bundle_id=cluster.bundle_id, - flat_mapping=( - HostComponentEntry(host_id=host.id, component_id=component.id) for (_, host, component) in host_map - ), - skip_checks=True, + flat_mapping=after_upgrade_hostcomponent, + checks_func=partial(check_only_mapping, error_template=HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE), ) @@ -220,7 +220,7 @@ def prepare_ansible_environment(task: Task, job: Job, configuration: ExternalSet def prepare_ansible_inventory(task: Task) -> dict[str, Any]: - delta = {} + delta = None if task.action.hc_acl: cluster_id = None if task.owner: @@ -233,18 +233,19 @@ def prepare_ansible_inventory(task: Task) -> dict[str, Any]: message = f"Can't detect cluster id for {task.id} {task.action.name} based on: {task.owner=}" raise RuntimeError(message) - new_hc = [] - for hostcomponent in HostComponent.objects.select_related("service", "host", "component").filter( - cluster_id=cluster_id - ): - new_hc.append((hostcomponent.service, hostcomponent.host, hostcomponent.component)) - - delta = cook_delta( - cluster=Cluster.objects.get(id=cluster_id), - new_hc=new_hc, - action_hc=task.action.hc_acl, - old=get_old_hc(saved_hostcomponent=task.hostcomponent.saved), + current_topology = retrieve_cluster_topology(cluster_id=cluster_id) + previous_topology = create_topology_with_new_mapping( + topology=current_topology, + new_mapping=( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in task.hostcomponent.saved + ), + ) + delta = construct_delta_for_task( + topology=current_topology, + host_difference=find_hosts_difference(new_topology=current_topology, 
old_topology=previous_topology), ) + # todo need check_delta_is_allowed? return get_inventory_data( target=task.target, diff --git a/python/cm/services/job/run/_task_finalizers.py b/python/cm/services/job/run/_task_finalizers.py index 6c7170cbe5..1f95b2c128 100644 --- a/python/cm/services/job/run/_task_finalizers.py +++ b/python/cm/services/job/run/_task_finalizers.py @@ -28,7 +28,7 @@ get_object_cluster, ) from cm.services.concern.messages import ConcernMessage, PlaceholderObjectsDTO, build_concern_reason -from cm.services.mapping import change_host_component_mapping +from cm.services.mapping import change_host_component_mapping, check_nothing from cm.status_api import send_object_update_event # todo "unwrap" these functions to use repo without directly calling ORM, @@ -73,7 +73,7 @@ def set_hostcomponent(task: Task, logger: Logger): HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) for entry in task.hostcomponent.saved ), - skip_checks=True, + checks_func=check_nothing, ) diff --git a/python/cm/services/job/run/repo.py b/python/cm/services/job/run/repo.py index fac4cef973..9d36864836 100644 --- a/python/cm/services/job/run/repo.py +++ b/python/cm/services/job/run/repo.py @@ -201,7 +201,11 @@ def create_task( owner_type=owner.type.value, config=payload.conf, attr=payload.attr or {}, - hostcomponentmap=payload.hostcomponent, + hostcomponentmap=[ + {"host_id": entry.host_id, "component_id": entry.component_id} for entry in payload.hostcomponent + ] + if payload.hostcomponent is not None + else None, post_upgrade_hc_map=payload.post_upgrade_hostcomponent, verbose=payload.verbose, status=ExecutionStatus.CREATED.value, diff --git a/python/cm/services/job/types.py b/python/cm/services/job/types.py index afe9904746..03569597d3 100644 --- a/python/cm/services/job/types.py +++ b/python/cm/services/job/types.py @@ -10,13 +10,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from dataclasses import dataclass, field from enum import Enum -from typing import Any, Literal, TypeAlias +from typing import Any, Literal, TypeAlias, TypedDict -from core.types import ClusterID, ComponentID, HostID, HostProviderID, ObjectID, PrototypeID, ServiceID +from core.types import ClusterID, ComponentID, HostID, HostProviderID, ObjectID, PrototypeID, ServiceID, ShortObjectInfo from pydantic import BaseModel, Field, Json Selector: TypeAlias = dict[str, dict[Literal["id", "name"], int | str]] +ComponentComposedKey: TypeAlias = str +ShortHostInfo: TypeAlias = ShortObjectInfo class ObjectWithHostGroup(BaseModel): @@ -111,3 +114,19 @@ def model_dump(self, **kwargs) -> dict[str, Any]: class HcAclAction(Enum): ADD = "add" REMOVE = "remove" + + +@dataclass(slots=True) +class TaskMappingDelta: + add: dict[ComponentComposedKey, set[ShortHostInfo]] = field(default_factory=dict) + remove: dict[ComponentComposedKey, set[ShortHostInfo]] = field(default_factory=dict) + + @property + def is_empty(self) -> bool: + return not (self.add or self.remove) + + +class ActionHCRule(TypedDict): + action: Literal["add", "remove"] + service: str + component: str diff --git a/python/cm/services/maintenance_mode.py b/python/cm/services/maintenance_mode.py index 18e3a6cc42..a24fa10ec6 100644 --- a/python/cm/services/maintenance_mode.py +++ b/python/cm/services/maintenance_mode.py @@ -40,7 +40,7 @@ def _change_mm_via_action( run_action( action=action, obj=obj, - payload=ActionRunPayload(conf={}, attr={}, hostcomponent=[], verbose=False), + payload=ActionRunPayload(conf={}, attr={}, hostcomponent=set(), verbose=False), ) serializer.validated_data["maintenance_mode"] = MaintenanceMode.CHANGING diff --git a/python/cm/services/mapping.py b/python/cm/services/mapping.py index 57703aa0d8..909805fa6f 100644 --- a/python/cm/services/mapping.py +++ b/python/cm/services/mapping.py @@ -10,10 +10,11 @@ # See the License for the specific language governing permissions and # limitations under the 
License. -from typing import Iterable +from typing import Iterable, Protocol from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference -from core.cluster.types import ClusterTopology, HostComponentEntry +from core.cluster.types import ClusterTopology, HostComponentEntry, TopologyHostDiff +from core.concern.types import BundleRestrictions from core.types import ADCMCoreType, BundleID, ClusterID, CoreObjectDescriptor, HostID from django.contrib.contenttypes.models import ContentType from django.db.transaction import atomic @@ -23,6 +24,7 @@ from cm.errors import AdcmEx from cm.models import Cluster, ClusterObject, ConcernCause, Host, HostComponent, MaintenanceMode from cm.services.action_host_group import ActionHostGroupRepo +from cm.services.bundle import retrieve_bundle_restrictions from cm.services.cluster import retrieve_cluster_topology from cm.services.concern import create_issue, delete_issue, retrieve_issue from cm.services.concern.checks import ( @@ -32,14 +34,51 @@ ) from cm.services.concern.distribution import lock_objects, redistribute_issues_and_flags, unlock_objects from cm.services.concern.locks import retrieve_lock_on_object -from cm.services.concern.repo import BundleRestrictions, retrieve_bundle_restrictions from cm.services.group_config import ConfigHostGroupRepo from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import send_host_component_map_update_event +class PerformMappingChecks(Protocol): + def __call__( + self, bundle_restrictions: BundleRestrictions, new_topology: ClusterTopology, host_difference: TopologyHostDiff + ) -> None: + ... 
+ + +def check_nothing( + bundle_restrictions: BundleRestrictions, new_topology: ClusterTopology, host_difference: TopologyHostDiff +) -> None: + _ = bundle_restrictions, new_topology, host_difference + + +def check_only_mapping( + bundle_restrictions: BundleRestrictions, + new_topology: ClusterTopology, + host_difference: TopologyHostDiff, + error_template="{}", +) -> None: + _ = host_difference + check_mapping_restrictions( + mapping_restrictions=bundle_restrictions.mapping, topology=new_topology, error_message_template=error_template + ) + + +def check_all( + bundle_restrictions: BundleRestrictions, new_topology: ClusterTopology, host_difference: TopologyHostDiff +) -> None: + check_service_requirements(services_restrictions=bundle_restrictions.service_requires, topology=new_topology) + check_only_mapping( + bundle_restrictions=bundle_restrictions, new_topology=new_topology, host_difference=host_difference + ) + check_no_host_in_mm(host_difference.mapped.all) + + def change_host_component_mapping( - cluster_id: ClusterID, bundle_id: BundleID, flat_mapping: Iterable[HostComponentEntry], *, skip_checks: bool = False + cluster_id: ClusterID, + bundle_id: BundleID, + flat_mapping: Iterable[HostComponentEntry], + checks_func: PerformMappingChecks = check_all, ) -> ClusterTopology: # force remove duplicates new_mapping_entries = set(flat_mapping) @@ -55,13 +94,7 @@ def change_host_component_mapping( # business checks - # sometimes it's required to skip checks (e.g. 
in plugin calls) - if not skip_checks: - check_service_requirements( - services_restrictions=bundle_restrictions.service_requires, topology=new_topology - ) - check_mapping_restrictions(mapping_restrictions=bundle_restrictions.mapping, topology=new_topology) - check_no_host_in_mm(host_difference.mapped.all) + checks_func(bundle_restrictions=bundle_restrictions, new_topology=new_topology, host_difference=host_difference) # save _recreate_mapping_in_db(topology=new_topology) diff --git a/python/cm/services/service.py b/python/cm/services/service.py index 9da6625d44..2168c219b7 100644 --- a/python/cm/services/service.py +++ b/python/cm/services/service.py @@ -68,7 +68,7 @@ def delete_service_from_api(service: ClusterObject) -> Response: run_action( action=delete_action, obj=service, - payload=ActionRunPayload(conf={}, attr={}, hostcomponent=[], verbose=False), + payload=ActionRunPayload(conf={}, attr={}, hostcomponent=set(), verbose=False), ) else: delete_service(service=service) diff --git a/python/cm/tests/test_action_host_group.py b/python/cm/tests/test_action_host_group.py index 7e707efcf7..8e05d90799 100644 --- a/python/cm/tests/test_action_host_group.py +++ b/python/cm/tests/test_action_host_group.py @@ -96,7 +96,7 @@ def test_generate_inventory_success(self) -> None: group_inventory = get_inventory_data( target=ActionTargetDescriptor(id=self.action_group.id, type=ExtraActionTargetType.ACTION_HOST_GROUP), is_host_action=False, - delta={}, + delta=None, ) self.assertIn("target", group_inventory["all"]["children"]) @@ -105,7 +105,9 @@ def test_generate_inventory_success(self) -> None: ) owner_inventory = get_inventory_data( - target=ActionTargetDescriptor(id=self.cluster.id, type=ADCMCoreType.CLUSTER), is_host_action=False, delta={} + target=ActionTargetDescriptor(id=self.cluster.id, type=ADCMCoreType.CLUSTER), + is_host_action=False, + delta=None, ) group_inventory["all"]["children"].pop("target") diff --git a/python/cm/tests/test_hc.py 
b/python/cm/tests/test_hc.py index a1ec3ae50e..4f9868e60c 100644 --- a/python/cm/tests/test_hc.py +++ b/python/cm/tests/test_hc.py @@ -13,176 +13,13 @@ from pathlib import Path from adcm.tests.base import APPLICATION_JSON, BaseTestCase, BusinessLogicMixin -from django.conf import settings -from django.urls import reverse -from rest_framework.response import Response -from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED +from rest_framework.status import HTTP_200_OK -from cm.api import add_host_to_cluster -from cm.errors import AdcmEx -from cm.models import Action, Bundle, ClusterObject, Host, Prototype, ServiceComponent -from cm.services.job.checks import check_hostcomponentmap +from cm.models import Action, ServiceComponent from cm.tests.mocks.task_runner import RunTaskMock -from cm.tests.test_upgrade import ( - cook_cluster, - cook_cluster_bundle, - cook_provider, - cook_provider_bundle, -) class TestHC(BaseTestCase, BusinessLogicMixin): - def test_action_hc_simple(self): - bundle_1 = cook_cluster_bundle("1.0") - cluster = cook_cluster(bundle_1, "Test1") - bundle_2 = cook_provider_bundle("1.0") - provider = cook_provider(bundle_2, "DF01") - host_1 = Host.objects.get(provider=provider, fqdn="server01.inter.net") - - action = Action(name="run") - hc_list, *_ = check_hostcomponentmap(cluster, action, []) - self.assertEqual(hc_list, None) - - with self.assertRaises(AdcmEx) as e: - action = Action(name="run", hostcomponentmap=["qwe"]) - hc_list, *_ = check_hostcomponentmap(cluster, action, []) - self.assertEqual(e.exception.code, "TASK_ERROR") - self.assertEqual(e.exception.msg, "hc is required") - - service = ClusterObject.objects.get(cluster=cluster, prototype__name="hadoop") - sc1 = ServiceComponent.objects.get(cluster=cluster, service=service, prototype__name="server") - with self.assertRaises(AdcmEx) as e: - action = Action(name="run", hostcomponentmap=["qwe"]) - hostcomponent = [{"service_id": service.id, "component_id": sc1.id, "host_id": 500}] - 
hc_list, *_ = check_hostcomponentmap(cluster, action, hostcomponent) - self.assertEqual(e.exception.code, "HOST_NOT_FOUND") - - with self.assertRaises(AdcmEx) as e: - action = Action(name="run", hostcomponentmap=["qwe"]) - hostcomponent = [{"service_id": service.id, "component_id": sc1.id, "host_id": host_1.id}] - hc_list, *_ = check_hostcomponentmap(cluster, action, hostcomponent) - self.assertEqual(e.exception.code, "FOREIGN_HOST") - - add_host_to_cluster(cluster, host_1) - with self.assertRaises(AdcmEx) as e: - action = Action(name="run", hostcomponentmap="qwe") - hostcomponent = [{"service_id": 500, "component_id": sc1.id, "host_id": host_1.id}] - hc_list, *_ = check_hostcomponentmap(cluster, action, hostcomponent) - self.assertEqual(e.exception.code, "CLUSTER_SERVICE_NOT_FOUND") - - with self.assertRaises(AdcmEx) as e: - action = Action(name="run", hostcomponentmap=["qwe"]) - hostcomponent = [{"service_id": service.id, "component_id": 500, "host_id": host_1.id}] - hc_list, *_ = check_hostcomponentmap(cluster, action, hostcomponent) - self.assertEqual(e.exception.code, "COMPONENT_NOT_FOUND") - - def test_action_hc(self): - bundle_1 = cook_cluster_bundle("1.0") - cluster = cook_cluster(bundle_1, "Test1") - bundle_2 = cook_provider_bundle("1.0") - provider = cook_provider(bundle_2, "DF01") - - host_1 = Host.objects.get(provider=provider, fqdn="server01.inter.net") - host_2 = Host.objects.get(provider=provider, fqdn="server02.inter.net") - service = ClusterObject.objects.get(cluster=cluster, prototype__name="hadoop") - sc1 = ServiceComponent.objects.get(cluster=cluster, service=service, prototype__name="server") - - add_host_to_cluster(cluster, host_1) - add_host_to_cluster(cluster, host_2) - - try: - act_hc = [{"service": "hadoop", "component": "server", "action": "delete"}] - action = Action(name="run", hostcomponentmap=act_hc) - hostcomponent = [{"service_id": service.id, "component_id": sc1.id, "host_id": host_1.id}] - hc_list, *_ = 
check_hostcomponentmap(cluster, action, hostcomponent) - - self.assertNotEqual(hc_list, None) - except AdcmEx as e: - self.assertEqual(e.code, "WRONG_ACTION_HC") - self.assertEqual(e.msg[:32], 'no permission to "add" component') - - act_hc = [{"service": "hadoop", "component": "server", "action": "add"}] - action = Action(name="run", hostcomponentmap=act_hc) - hostcomponent = [ - {"service_id": service.id, "component_id": sc1.id, "host_id": host_1.id}, - {"service_id": service.id, "component_id": sc1.id, "host_id": host_2.id}, - ] - hc_list, *_ = check_hostcomponentmap(cluster, action, hostcomponent) - - self.assertNotEqual(hc_list, None) - - self.set_hostcomponent( - cluster=cluster, - entries=[ - (Host.objects.get(id=entry["host_id"]), ServiceComponent.objects.get(id=entry["component_id"])) - for entry in hostcomponent - ], - ) - act_hc = [{"service": "hadoop", "component": "server", "action": "remove"}] - action = Action(name="run", hostcomponentmap=act_hc) - hostcomponent = [ - {"service_id": service.id, "component_id": sc1.id, "host_id": host_2.id}, - ] - hc_list, *_ = check_hostcomponentmap(cluster, action, hostcomponent) - - self.assertNotEqual(hc_list, None) - - def test_empty_hostcomponent(self): - test_bundle_filename = "min-3199.tar" - test_bundle_path = Path( - self.base_dir, - "python/cm/tests/files", - test_bundle_filename, - ) - with open(test_bundle_path, encoding=settings.ENCODING_UTF_8) as f: - response: Response = self.client.post( - path=reverse(viewname="v1:upload-bundle"), - data={"file": f}, - ) - - self.assertEqual(response.status_code, HTTP_201_CREATED) - - response: Response = self.client.post( - path=reverse(viewname="v1:load-bundle"), - data={"bundle_file": test_bundle_filename}, - ) - - self.assertEqual(response.status_code, HTTP_200_OK) - - bundle = Bundle.objects.get(pk=response.data["id"]) - cluster_prototype = Prototype.objects.get(bundle=bundle, type="cluster") - service_prototype = Prototype.objects.get(bundle=bundle, 
type="service") - - response: Response = self.client.post( - path=reverse(viewname="v1:cluster"), - data={ - "bundle_id": bundle.pk, - "display_name": "test_cluster_display_name", - "name": "test-cluster-name", - "prototype_id": cluster_prototype.pk, - }, - ) - cluster_pk = response.data["id"] - - self.assertEqual(response.status_code, HTTP_201_CREATED) - - response: Response = self.client.post( - path=reverse(viewname="v1:service", kwargs={"cluster_id": cluster_pk}), - data={ - "prototype_id": service_prototype.pk, - }, - content_type=APPLICATION_JSON, - ) - - self.assertEqual(response.status_code, HTTP_201_CREATED) - - response: Response = self.client.get( - path=f'{reverse(viewname="v1:host-component", kwargs={"cluster_id": cluster_pk})}?view=interface', - extra={"view": "interface"}, - ) - - self.assertEqual(response.status_code, HTTP_200_OK) - def test_adcm_4929_run_same_hc_success(self) -> None: bundles_dir = Path(__file__).parent / "bundles" bundle = self.add_bundle(bundles_dir / "cluster_1") diff --git a/python/cm/tests/test_inventory/base.py b/python/cm/tests/test_inventory/base.py index c079a25a2e..6623c0f8f8 100644 --- a/python/cm/tests/test_inventory/base.py +++ b/python/cm/tests/test_inventory/base.py @@ -16,6 +16,8 @@ import json from adcm.tests.base import BaseTestCase, BusinessLogicMixin +from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference +from core.cluster.types import HostComponentEntry from core.types import CoreObjectDescriptor from django.contrib.contenttypes.models import ContentType from jinja2 import Template @@ -26,15 +28,14 @@ Action, ADCMEntity, ADCMModel, - ClusterObject, GroupConfig, Host, - HostComponent, MaintenanceMode, - ServiceComponent, ) +from cm.services.cluster import retrieve_cluster_topology +from cm.services.job._utils import construct_delta_for_task from cm.services.job.inventory import get_inventory_data -from cm.services.job.types import HcAclAction +from cm.services.job.types 
import TaskMappingDelta TemplatesData: TypeAlias = Mapping[tuple[str, ...], tuple[Path, Mapping[str, Any]]] MappingEntry: TypeAlias = dict[Literal["host_id", "component_id", "service_id"], int] @@ -96,7 +97,12 @@ def check_data_by_template(self, data: Mapping[str, dict], templates_data: Templ self.assertDictEqual(actual_data, expected_data) def assert_inventory( - self, obj: ADCMEntity, action: Action, expected_topology: dict, expected_data: dict, delta: Delta | None = None + self, + obj: ADCMEntity, + action: Action, + expected_topology: dict, + expected_data: dict, + delta: TaskMappingDelta | None = None, ) -> None: target = CoreObjectDescriptor(id=obj.id, type=model_name_to_core_type(obj.__class__.__name__)) actual_inventory = decrypt_secrets( @@ -117,32 +123,20 @@ def add_group_config(parent: ADCMModel, hosts: Iterable[Host]) -> GroupConfig: return group_config @staticmethod - def get_mapping_delta_for_hc_acl(cluster, new_mapping: list[MappingEntry]) -> Delta: - existing_mapping_ids = set( - HostComponent.objects.values_list("host_id", "component_id", "service_id").filter(cluster=cluster) + def get_mapping_delta_for_hc_acl(cluster, new_mapping: list[MappingEntry]) -> TaskMappingDelta: + topology = retrieve_cluster_topology(cluster_id=cluster.id) + new_topology = create_topology_with_new_mapping( + topology=topology, + new_mapping=( + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in new_mapping + ), ) - new_mapping_ids = {(hc["host_id"], hc["component_id"], hc["service_id"]) for hc in new_mapping} - added = {} - for host_id, component_id, service_id in new_mapping_ids.difference(existing_mapping_ids): - host = Host.objects.get(pk=host_id, cluster=cluster) - service = ClusterObject.objects.get(pk=service_id, cluster=cluster) - component = ServiceComponent.objects.get(pk=component_id, cluster=cluster, service=service) - - added.setdefault(f"{service.name}.{component.name}", {}).setdefault(host.fqdn, host) - - removed 
= {} - for host_id, component_id, service_id in existing_mapping_ids.difference(new_mapping_ids): - host = Host.objects.get(pk=host_id, cluster=cluster) - service = ClusterObject.objects.get(pk=service_id, cluster=cluster) - component = ServiceComponent.objects.get(pk=component_id, cluster=cluster, service=service) - - removed.setdefault(f"{service.name}.{component.name}", {}).setdefault(host.fqdn, host) - - return { - HcAclAction.ADD.value: added, - HcAclAction.REMOVE.value: removed, - } + return construct_delta_for_task( + topology=new_topology, + host_difference=find_hosts_difference(new_topology=new_topology, old_topology=topology), + ) @staticmethod def get_maintenance_mode_for_render(maintenance_mode: MaintenanceMode) -> str: diff --git a/python/cm/tests/test_inventory/test_inventory.py b/python/cm/tests/test_inventory/test_inventory.py index 6a376b6813..fe9bf852b6 100644 --- a/python/cm/tests/test_inventory/test_inventory.py +++ b/python/cm/tests/test_inventory/test_inventory.py @@ -14,6 +14,7 @@ from pathlib import Path from adcm.tests.base import BaseTestCase, BusinessLogicMixin +from core.cluster.types import HostComponentEntry from core.types import CoreObjectDescriptor from init_db import init as init_adcm @@ -287,7 +288,13 @@ def test_groups_remove_host_not_in_mm_success(self): inventory_data = self.get_children_from_inventory( action=self.action_hc_acl, object_=self.cluster_hc_acl, - payload=ActionRunPayload(hostcomponent=hc_request_data, verbose=False), + payload=ActionRunPayload( + hostcomponent={ + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in hc_request_data + }, + verbose=False, + ), ) target_key_remove = ( @@ -330,7 +337,13 @@ def test_groups_remove_host_in_mm_success(self): inventory_data = self.get_children_from_inventory( action=self.action_hc_acl, object_=self.cluster_hc_acl, - payload=ActionRunPayload(hostcomponent=hc_request_data, verbose=False), + payload=ActionRunPayload( + 
hostcomponent={ + HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + for entry in hc_request_data + }, + verbose=False, + ), ) target_key_remove = ( diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index 4d44f1797c..ccd0f0fd68 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -67,7 +67,8 @@ from cm.services.concern.checks import object_configuration_has_issue from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags from cm.services.job.action import ActionRunPayload, run_action -from cm.services.mapping import change_host_component_mapping +from cm.services.job.types import HcAclAction +from cm.services.mapping import change_host_component_mapping, check_nothing from cm.status_api import send_prototype_and_state_update_event from cm.utils import obj_ref @@ -136,7 +137,7 @@ def do_upgrade( upgrade: Upgrade, config: dict, attr: dict, - hostcomponent: list, + hostcomponent: list[dict], verbose: bool = False, ) -> dict: check_license(prototype=obj.prototype) @@ -165,10 +166,40 @@ def do_upgrade( send_prototype_and_state_update_event(object_=obj) else: + bundle_id = upgrade.bundle_id + add_hc_rules = { + (rule["service"], rule["component"]) + for rule in upgrade.action.hostcomponentmap + if rule["action"] == HcAclAction.ADD.value + } + + existing_hostcomponent: set[HostComponentEntry] = set() + post_upgrade: list[dict] = [] + for entry in hostcomponent: + # alternative to removed `_check_upgrade_hc` + if "component_prototype_id" in entry: + component_name, service_name = Prototype.obj.values_list("name", "parent__name").get( + type="component", + id=entry["component_prototype_id"], + bundle_id=bundle_id, + ) + if (service_name, component_name) not in add_hc_rules: + raise AdcmEx( + code="WRONG_ACTION_HC", + msg="New components from bundle with upgrade you can only add, not remove", + ) + + post_upgrade.append(entry) + else: + existing_hostcomponent.add( + 
HostComponentEntry(host_id=entry["host_id"], component_id=entry["component_id"]) + ) + task = run_action( action=upgrade.action, obj=obj, - payload=ActionRunPayload(conf=config, attr=attr, hostcomponent=hostcomponent, verbose=verbose), + payload=ActionRunPayload(conf=config, attr=attr, hostcomponent=existing_hostcomponent, verbose=verbose), + post_upgrade_hc=post_upgrade, ) task_id = task.id @@ -252,7 +283,7 @@ def bundle_revert(obj: Cluster | HostProvider) -> None: HostComponentEntry(host_id=host.id, component_id=component.id) for (_, host, component) in host_comp_list ), - skip_checks=True, + checks_func=check_nothing, ) if isinstance(obj, HostProvider): diff --git a/python/core/cluster/operations.py b/python/core/cluster/operations.py index 93085296ef..37c93a59d9 100644 --- a/python/core/cluster/operations.py +++ b/python/core/cluster/operations.py @@ -28,7 +28,7 @@ ServiceTopology, TopologyHostDiff, ) -from core.types import ClusterID, ComponentID, HostID, MappingDict, ShortObjectInfo +from core.types import ClusterID, ComponentID, HostID, ShortObjectInfo # !===== Cluster Topology =====! 
@@ -52,31 +52,22 @@ def get_host_component_entries( def build_clusters_topology( - cluster_ids: Iterable[ClusterID], - db: ClusterTopologyDBProtocol, - input_mapping: dict[ClusterID, list[MappingDict]] | None = None, + cluster_ids: Iterable[ClusterID], db: ClusterTopologyDBProtocol ) -> Generator[ClusterTopology, None, None]: - input_mapping = {} if input_mapping is None else input_mapping - hosts_in_clusters = { cluster_id: {host.id: host for host in hosts} for cluster_id, hosts in db.get_clusters_hosts(cluster_ids=cluster_ids).items() } services_in_clusters = db.get_clusters_services_with_components(cluster_ids=cluster_ids) - # either existing mapping or input mapping is used to collect `hosts_on_components` hosts_on_components: dict[ClusterID, dict[ComponentID, set[HostID]]] = { cluster_id: defaultdict(set) for cluster_id in cluster_ids } - if hosts_in_clusters and services_in_clusters and not input_mapping: + if hosts_in_clusters: for cluster_id, entries in db.get_host_component_entries(cluster_ids=cluster_ids).items(): for entry in entries: hosts_on_components[cluster_id][entry.component_id].add(entry.host_id) - for cluster_id, input_mapping_list in input_mapping.items(): - for input_mapping_entry in input_mapping_list: - hosts_on_components[cluster_id][input_mapping_entry["component_id"]].add(input_mapping_entry["host_id"]) - return ( ClusterTopology( cluster_id=cluster_id, diff --git a/python/core/job/dto.py b/python/core/job/dto.py index 9d380f574b..374c085874 100644 --- a/python/core/job/dto.py +++ b/python/core/job/dto.py @@ -10,10 +10,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from dataclasses import dataclass from datetime import datetime from pydantic import BaseModel +from core.cluster.types import HostComponentEntry from core.job.types import ExecutionStatus, HostComponentChanges @@ -38,13 +40,14 @@ class LogCreateDTO(BaseModel): format: str -class TaskPayloadDTO(BaseModel): +@dataclass(slots=True) +class TaskPayloadDTO: verbose: bool = False conf: dict | None = None attr: dict | None = None - hostcomponent: list[dict] | None = None + hostcomponent: list[HostComponentEntry] | None = None post_upgrade_hostcomponent: list[dict] | None = None diff --git a/python/core/job/task.py b/python/core/job/task.py index dd4a031240..d2eb934337 100644 --- a/python/core/job/task.py +++ b/python/core/job/task.py @@ -12,6 +12,7 @@ from cm.services.job.jinja_scripts import get_job_specs_from_template +from cm.services.job.types import TaskMappingDelta from core.job.dto import LogCreateDTO, TaskPayloadDTO from core.job.errors import TaskCreateError @@ -26,7 +27,7 @@ def compose_task( payload: TaskPayloadDTO, job_repo: JobRepoInterface, action_repo: ActionRepoInterface, - delta: dict | None = None, + delta: TaskMappingDelta | None = None, ): """ Prepare task based on action, target object and task payload. 
From af89f1dcbd343f4661b92fe6092bbacd600f4521 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 25 Sep 2024 05:32:49 +0000 Subject: [PATCH 76/98] ADCM-5801 Fix `None` file handling inside of group in `loadcluster.py` --- python/cm/management/commands/loadcluster.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/python/cm/management/commands/loadcluster.py b/python/cm/management/commands/loadcluster.py index a840354373..91fc85d863 100644 --- a/python/cm/management/commands/loadcluster.py +++ b/python/cm/management/commands/loadcluster.py @@ -162,14 +162,17 @@ def process_config(proto, config): def create_file_from_config(obj, config): - if config is not None: - conf = config["current"]["config"] - proto = obj.prototype - for pconf in PrototypeConfig.objects.filter(prototype=proto, type="file"): - if pconf.subname and conf[pconf.name].get(pconf.subname): + if config is None: + return + + conf = config["current"]["config"] + + for pconf in PrototypeConfig.objects.filter(prototype=obj.prototype, type="file"): + if pconf.subname: + if pconf.subname in conf.get(pconf.name, {}): save_file_type(obj, pconf.name, pconf.subname, conf[pconf.name][pconf.subname]) - elif conf.get(pconf.name): - save_file_type(obj, pconf.name, "", conf[pconf.name]) + elif pconf.name in conf: + save_file_type(obj, pconf.name, "", conf[pconf.name]) def create_cluster(cluster): From 997fd0fbd3917a9ab40a7f28b4bf958d1230654f Mon Sep 17 00:00:00 2001 From: Maksim Mureev Date: Wed, 25 Sep 2024 13:31:50 +0000 Subject: [PATCH 77/98] Add support for ARM (arm64) architecture and reduce the final image size - Add dependencies for Poetry under ARM. Now the ADCM can be built under ARM (e.g. on macOS on an M-series processor) - Reduce the final image size. Dependency installation and removal of intermediate build results are placed in one RUN. 
Logic was changed not to copy the system directory to venv, which doubles its size, but to use the system directory using the `--system-site-packages` parameter --- Dockerfile | 26 ++++++++++++++++---------- Makefile | 6 ++++-- README.md | 6 +++--- 3 files changed, 23 insertions(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index fd4a762a11..b838566160 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,7 @@ FROM python:3.10-alpine +ENV PATH="/root/.local/bin:$PATH" +COPY pyproject.toml poetry.lock /adcm/ + RUN apk update && \ apk upgrade && \ apk add --virtual .build-deps \ @@ -21,18 +24,21 @@ RUN apk update && \ openssl \ rsync \ runit \ - sshpass && \ - curl -sSL https://install.python-poetry.org | python - -ENV PATH="/root/.local/bin:$PATH" -COPY pyproject.toml poetry.lock /adcm/ -RUN python -m venv /adcm/venv/2.9 && \ - poetry config virtualenvs.create false && \ - poetry -C /adcm install --no-root && \ - cp -r /usr/local/lib/python3.10/site-packages /adcm/venv/2.9/lib/python3.10 && \ + sshpass \ + libffi-dev && \ + curl -sSL https://install.python-poetry.org | POETRY_HOME=/tmp/poetry python - && \ + /tmp/poetry/bin/poetry config virtualenvs.create false && \ + /tmp/poetry/bin/poetry --directory=/adcm install --no-root && \ + python -m venv /adcm/venv/2.9 --system-site-packages && \ . /adcm/venv/2.9/bin/activate && \ pip install git+https://github.com/arenadata/ansible.git@v2.9.27-p1 && \ - deactivate -RUN apk del .build-deps + deactivate && \ + apk del .build-deps && \ + /tmp/poetry/bin/poetry cache clear pypi --all && \ + rm -rf /root/.cache && \ + rm -rf /var/cache/apk/* && \ + rm -rf /tmp/poetry + COPY . 
/adcm RUN mkdir -p /adcm/data/log && \ mkdir -p /usr/share/ansible/plugins/modules && \ diff --git a/Makefile b/Makefile index 9a34ff5493..075ef3b5fc 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,8 @@ SELENOID_HOST ?= 10.92.2.65 SELENOID_PORT ?= 4444 ADCM_VERSION = "2.4.0-dev" PY_FILES = python dev/linters conf/adcm/python_scripts +GOLANG_VERSION = 1.23 +NODE_VERSION = 18.16-alpine .PHONY: help @@ -12,10 +14,10 @@ help: @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' buildss: - @docker run -i --rm -v $(CURDIR)/go:/code -w /code golang sh -c "make" + @docker run -i --rm -v $(CURDIR)/go:/code -w /code golang:$(GOLANG_VERSION) sh -c "make" buildjs: - @docker run -i --rm -v $(CURDIR)/wwwroot:/wwwroot -v $(CURDIR)/adcm-web/app:/code -e ADCM_VERSION=$(ADCM_VERSION) -w /code node:18.16-alpine ./build.sh + @docker run -i --rm -v $(CURDIR)/wwwroot:/wwwroot -v $(CURDIR)/adcm-web/app:/code -e ADCM_VERSION=$(ADCM_VERSION) -w /code node:$(NODE_VERSION) ./build.sh build_base: @docker build . -t $(APP_IMAGE):$(APP_TAG) --build-arg ADCM_VERSION=$(ADCM_VERSION) diff --git a/README.md b/README.md index 9d84136252..e9c4a8cd57 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ git clone https://github.com/arenadata/adcm cd adcm -# Run build process +# Run build process for current architecture make build ``` @@ -64,7 +64,7 @@ clean Cleanup. Just a cleanup. describe Create .version file with output of describe help Shows that help build2js For new design and api v2: Build client side js/html/css in directory wwwroot -build2 For new design and api v2: Build final docker image and all depended targets except baseimage. +build2 For new design and api v2: Build final docker image and all depended targets except baseimage ``` And check out the description for every operation available. 
@@ -152,4 +152,4 @@ _PostgreSQL must be version 11 or newer - JSONB field used_ valid choices are: `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` - defaults to `ERROR` \ No newline at end of file + defaults to `ERROR` From 5eb5896dd44c1aa9f1bc66c6a1386dbc1192586e Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Tue, 1 Oct 2024 07:29:20 +0000 Subject: [PATCH 78/98] ADCM-5991 Improve build ADCM image --- .dockerignore | 8 +++-- Dockerfile | 85 +++++++++++++++++++++++++++++++---------------- Makefile | 18 ++-------- python/pytest.ini | 2 -- 4 files changed, 64 insertions(+), 49 deletions(-) delete mode 100644 python/pytest.ini diff --git a/.dockerignore b/.dockerignore index 865a330891..a575179361 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,8 @@ **/*.git* -**/.* +.* +go/**/.* +python/**/.* **/tests/ data -venv -adcm-web \ No newline at end of file +dev +venv \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index b838566160..a15a42ff0f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,23 @@ +FROM golang:1.23 AS go_builder +COPY ./go /code +WORKDIR /code +RUN sh -c "make" + + +FROM node:18.16-alpine AS ui_builder +ARG ADCM_VERSION +ENV ADCM_VERSION=$ADCM_VERSION +COPY ./adcm-web/app /code +WORKDIR /code +RUN . 
build.sh + + FROM python:3.10-alpine ENV PATH="/root/.local/bin:$PATH" -COPY pyproject.toml poetry.lock /adcm/ - RUN apk update && \ apk upgrade && \ - apk add --virtual .build-deps \ - build-base \ - linux-headers && \ - apk add \ + apk add --no-cache \ bash \ - curl \ git \ gnupg \ libc6-compat \ @@ -24,30 +32,51 @@ RUN apk update && \ openssl \ rsync \ runit \ - sshpass \ + sshpass && \ + apk cache clean --purge + +ENV PYTHONDONTWRITEBYTECODE=1 +ENV PYTHONUNBUFFERED=1 + +ENV POETRY_VERSION=1.8.3 +ENV POETRY_HOME=/opt/poetry +ENV POETRY_VENV=/opt/poetry-venv +ENV POETRY_CACHE_DIR=/opt/poetry-cache +ENV POETRY_VIRTUALENVS_CREATE=0 + +COPY poetry.lock pyproject.toml /adcm/ + +RUN apk add --no-cache --virtual .build-deps \ + build-base \ + linux-headers \ libffi-dev && \ - curl -sSL https://install.python-poetry.org | POETRY_HOME=/tmp/poetry python - && \ - /tmp/poetry/bin/poetry config virtualenvs.create false && \ - /tmp/poetry/bin/poetry --directory=/adcm install --no-root && \ + python -m venv $POETRY_VENV && \ + $POETRY_VENV/bin/pip install --no-cache-dir -U pip setuptools && \ + $POETRY_VENV/bin/pip install --no-cache-dir poetry==$POETRY_VERSION && \ + $POETRY_VENV/bin/poetry --no-cache --directory=/adcm install --no-root && \ python -m venv /adcm/venv/2.9 --system-site-packages && \ - . /adcm/venv/2.9/bin/activate && \ - pip install git+https://github.com/arenadata/ansible.git@v2.9.27-p1 && \ - deactivate && \ + /adcm/venv/2.9/bin/pip install --no-cache-dir git+https://github.com/arenadata/ansible.git@v2.9.27-p1 && \ + $POETRY_VENV/bin/poetry cache clear pypi --all && \ apk del .build-deps && \ - /tmp/poetry/bin/poetry cache clear pypi --all && \ - rm -rf /root/.cache && \ - rm -rf /var/cache/apk/* && \ - rm -rf /tmp/poetry - -COPY . 
/adcm -RUN mkdir -p /adcm/data/log && \ - mkdir -p /usr/share/ansible/plugins/modules && \ - cp -r /adcm/os/* / && \ - cp /adcm/os/etc/crontabs/root /var/spool/cron/crontabs/root && \ - cp -r /adcm/python/ansible/* /usr/local/lib/python3.10/site-packages/ansible/ && \ - cp -r /adcm/python/ansible/* /adcm/venv/2.9/lib/python3.10/site-packages/ansible/ && \ - python /adcm/python/manage.py collectstatic --noinput && \ - cp -r /adcm/wwwroot/static/rest_framework/css/* /adcm/wwwroot/static/rest_framework/docs/css/ + apk cache clean --purge && \ + rm -rf $POETRY_HOME && \ + rm -rf $POETRY_VENV && \ + rm -rf $POETRY_CACHE_DIR + +RUN rm /adcm/poetry.lock /adcm/pyproject.toml + +COPY os/etc /etc +COPY os/etc/crontabs/root /var/spool/cron/crontabs/root +COPY --from=go_builder /code/bin/runstatus /adcm/go/bin/runstatus +COPY --from=ui_builder /wwwroot /adcm/wwwroot +COPY conf /adcm/conf +COPY python/ansible/plugins /usr/share/ansible/plugins +COPY python /adcm/python + +RUN mkdir -p /adcm/data/log + +RUN python /adcm/python/manage.py collectstatic --noinput + ARG ADCM_VERSION ENV ADCM_VERSION=$ADCM_VERSION EXPOSE 8000 diff --git a/Makefile b/Makefile index 075ef3b5fc..48b0502390 100644 --- a/Makefile +++ b/Makefile @@ -5,26 +5,12 @@ SELENOID_HOST ?= 10.92.2.65 SELENOID_PORT ?= 4444 ADCM_VERSION = "2.4.0-dev" PY_FILES = python dev/linters conf/adcm/python_scripts -GOLANG_VERSION = 1.23 -NODE_VERSION = 18.16-alpine -.PHONY: help +.PHONY: build unittests_sqlite unittests_postgresql pretty lint version -help: - @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -buildss: - @docker run -i --rm -v $(CURDIR)/go:/code -w /code golang:$(GOLANG_VERSION) sh -c "make" - -buildjs: - @docker run -i --rm -v $(CURDIR)/wwwroot:/wwwroot -v $(CURDIR)/adcm-web/app:/code -e ADCM_VERSION=$(ADCM_VERSION) -w /code node:$(NODE_VERSION) ./build.sh - -build_base: +build: @docker build . 
-t $(APP_IMAGE):$(APP_TAG) --build-arg ADCM_VERSION=$(ADCM_VERSION) -# build ADCM_v2 -build: buildss buildjs build_base - unittests_sqlite: poetry install --no-root --with unittests poetry run python/manage.py test python -v 2 --parallel diff --git a/python/pytest.ini b/python/pytest.ini deleted file mode 100644 index 7603a27a52..0000000000 --- a/python/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -DJANGO_SETTINGS_MODULE = adcm.settings From 4b52044e8697797820536c513a0c3aad3433bf97 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 1 Oct 2024 07:35:04 +0000 Subject: [PATCH 79/98] ADCM-4764 Rework `dependOn` calculation for API v2 using `BundleRestrictions` --- python/api_v2/cluster/depend_on.py | 102 ++++++++++++++++++ python/api_v2/cluster/serializers.py | 65 +++++++++-- python/api_v2/cluster/utils.py | 83 -------------- python/api_v2/cluster/views.py | 87 +++++++++++---- python/api_v2/component/serializers.py | 46 +------- python/api_v2/service/serializers.py | 8 -- .../bundles/complex_dependencies/config.yaml | 52 +++++++++ python/api_v2/tests/test_cluster.py | 37 +++++++ python/api_v2/tests/test_mapping.py | 8 -- python/cm/services/bundle.py | 9 +- python/cm/services/concern/checks.py | 12 +-- python/cm/services/mapping.py | 2 +- python/core/bundle/__init__.py | 11 ++ python/core/bundle/operations.py | 68 ++++++++++++ python/core/{concern => bundle}/types.py | 19 +--- python/core/concern/checks/_mapping.py | 9 +- .../core/concern/checks/_service_requires.py | 8 +- python/core/types.py | 15 +++ 18 files changed, 428 insertions(+), 213 deletions(-) create mode 100644 python/api_v2/cluster/depend_on.py delete mode 100644 python/api_v2/cluster/utils.py create mode 100644 python/api_v2/tests/bundles/complex_dependencies/config.yaml create mode 100644 python/core/bundle/__init__.py create mode 100644 python/core/bundle/operations.py rename python/core/{concern => bundle}/types.py (85%) diff --git a/python/api_v2/cluster/depend_on.py 
b/python/api_v2/cluster/depend_on.py new file mode 100644 index 0000000000..954fdd1c36 --- /dev/null +++ b/python/api_v2/cluster/depend_on.py @@ -0,0 +1,102 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import defaultdict +from functools import reduce +from operator import or_ +from typing import Iterable, TypeAlias + +from cm.models import ObjectType, Prototype +from core.bundle.operations import RequiresDependencies +from core.types import ComponentName, ComponentNameKey, ObjectID, ServiceName, ServiceNameKey +from django.db.models import Q + +from api_v2.prototype.utils import get_license_text + +DependOnIDNameHierarchy: TypeAlias = dict[ObjectID, dict[ServiceName, set[ComponentName]]] + + +def prepare_depend_on_hierarchy( + dependencies: RequiresDependencies, targets: Iterable[tuple[ObjectID, ServiceNameKey | ComponentNameKey]] +) -> DependOnIDNameHierarchy: + result = defaultdict(lambda: defaultdict(set)) + + for object_id, key in targets: + for required_object_key in dependencies[key]: + if isinstance(required_object_key, ComponentNameKey): + result[object_id][required_object_key.service].add(required_object_key.component) + elif required_object_key.service not in result[object_id]: + result[object_id][required_object_key.service] = set() + + return result + + +def retrieve_serialized_depend_on_hierarchy( + hierarchy: DependOnIDNameHierarchy, bundle_id: int, bundle_hash: str +) -> dict[ObjectID, list[dict]]: + objects_in_hierarchy: 
dict[ServiceName, set[ComponentName]] = defaultdict(set) + + for object_dict in hierarchy.values(): + for service_name, component_names in object_dict.items(): + objects_in_hierarchy[service_name].update(component_names) + + service_proto_query = Q(type=ObjectType.SERVICE, name__in=objects_in_hierarchy) + components_proto_query = reduce( + or_, + ( + Q(type=ObjectType.COMPONENT, name__in=component_names, parent__name=service_name) + for service_name, component_names in objects_in_hierarchy.items() + ), + Q(), + ) + + serialized: dict[ServiceNameKey | ComponentNameKey, dict] = {} + for prototype in Prototype.objects.filter( + service_proto_query | components_proto_query, bundle_id=bundle_id + ).select_related("parent"): + if prototype.type == ObjectType.COMPONENT: + key = ComponentNameKey(service=prototype.parent.name, component=prototype.name) + serialized[key] = { + "id": prototype.id, + "name": prototype.name, + "display_name": prototype.display_name, + "version": prototype.version, + } + continue + + key = ServiceNameKey(service=prototype.name) + serialized[key] = { + "id": prototype.id, + "name": prototype.name, + "display_name": prototype.display_name, + "version": prototype.version, + "license": { + "status": prototype.license, + "text": get_license_text(license_path=prototype.license_path, bundle_hash=bundle_hash), + }, + } + + return { + object_id: [ + { + "service_prototype": { + **serialized[ServiceNameKey(service=service_name)], + "component_prototypes": [ + serialized[ComponentNameKey(service=service_name, component=component_name)] + for component_name in component_names + ], + } + } + for service_name, component_names in dependencies.items() + ] + for object_id, dependencies in hierarchy.items() + } diff --git a/python/api_v2/cluster/serializers.py b/python/api_v2/cluster/serializers.py index f5e067dd51..6f3a049725 100644 --- a/python/api_v2/cluster/serializers.py +++ b/python/api_v2/cluster/serializers.py @@ -19,6 +19,7 @@ ClusterObject, Host, 
HostComponent, + MaintenanceMode, Prototype, ServiceComponent, ) @@ -26,11 +27,18 @@ from cm.validators import ClusterUniqueValidator, StartMidEndValidator from django.conf import settings from drf_spectacular.utils import extend_schema_field -from rest_framework.fields import CharField, DictField, IntegerField -from rest_framework.serializers import BooleanField, ModelSerializer, SerializerMethodField +from rest_framework.fields import DictField +from rest_framework.serializers import ( + BooleanField, + CharField, + ChoiceField, + IntegerField, + ListField, + ModelSerializer, + SerializerMethodField, +) from rest_framework.status import HTTP_409_CONFLICT -from api_v2.cluster.utils import get_depend_on from api_v2.concern.serializers import ConcernSerializer from api_v2.prototype.serializers import LicenseSerializer, PrototypeRelatedSerializer from api_v2.prototype.utils import get_license_text @@ -126,6 +134,14 @@ class Meta: fields = ("name", "description") +class ServiceNameSerializer(ModelSerializer): + prototype = PrototypeRelatedSerializer(read_only=True) + + class Meta: + model = ClusterObject + fields = ["id", "name", "display_name", "state", "prototype"] + + class ServicePrototypeSerializer(ModelSerializer): is_required = BooleanField(source="required") depend_on = SerializerMethodField() @@ -135,13 +151,9 @@ class Meta: model = Prototype fields = ["id", "name", "display_name", "version", "is_required", "depend_on", "license"] - @staticmethod @extend_schema_field(field=DependOnSerializer(many=True)) - def get_depend_on(prototype: Prototype) -> list[dict] | None: - if prototype.requires: - return get_depend_on(prototype=prototype) - - return None + def get_depend_on(self, prototype: Prototype) -> list[dict] | None: + return self.context["depend_on"].get(prototype.id) @staticmethod @extend_schema_field(field=LicenseSerializer) @@ -266,3 +278,38 @@ class ClusterHostStatusSerializer(EmptySerializer): class Meta: model = Host fields = ["host_components"] + + 
+class ComponentMappingSerializer(ModelSerializer): + service = ServiceNameSerializer(read_only=True) + depend_on = SerializerMethodField() + constraints = ListField(source="constraint") + prototype = PrototypeRelatedSerializer(read_only=True) + maintenance_mode = SerializerMethodField() + is_maintenance_mode_available = SerializerMethodField() + + class Meta: + model = ServiceComponent + fields = [ + "id", + "name", + "display_name", + "is_maintenance_mode_available", + "maintenance_mode", + "constraints", + "prototype", + "depend_on", + "service", + ] + + @extend_schema_field(field=DependOnSerializer(many=True)) + def get_depend_on(self, instance: ServiceComponent) -> list[dict] | None: + return self.context["depend_on"].get(instance.id) + + @extend_schema_field(field=ChoiceField(choices=(MaintenanceMode.ON.value, MaintenanceMode.OFF.value))) + def get_maintenance_mode(self, instance: ServiceComponent): + return self.context["mm"].components.get(instance.id, MaintenanceMode.OFF).value + + @extend_schema_field(field=BooleanField()) + def get_is_maintenance_mode_available(self, _instance: ServiceComponent): + return self.context["is_mm_available"] diff --git a/python/api_v2/cluster/utils.py b/python/api_v2/cluster/utils.py deleted file mode 100644 index c4c6d3486d..0000000000 --- a/python/api_v2/cluster/utils.py +++ /dev/null @@ -1,83 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from collections import defaultdict - -from cm.models import ObjectType, Prototype - -from api_v2.prototype.utils import get_license_text - - -def get_requires(requires: list[dict]) -> dict: - new_requires = defaultdict(list) - - for require in requires: - if "component" in require: - new_requires[require["service"]].append(require["component"]) - elif require["service"] not in new_requires: - new_requires[require["service"]] = [] - - return new_requires - - -def get_depend_on( - prototype: Prototype, depend_on: list[dict] | None = None, checked_objects: set[Prototype] | None = None -) -> list[dict]: - if depend_on is None: - depend_on = [] - - if checked_objects is None: - checked_objects = set() - - checked_objects.add(prototype) - - for service_name, component_names in get_requires(requires=prototype.requires).items(): - required_service = Prototype.objects.get(type=ObjectType.SERVICE, name=service_name, bundle=prototype.bundle) - checked_objects.add(required_service) - service_prototype = { - "id": required_service.pk, - "name": required_service.name, - "display_name": required_service.display_name, - "version": required_service.version, - "license": { - "status": required_service.license, - "text": get_license_text( - license_path=required_service.license_path, - bundle_hash=required_service.bundle.hash, - ), - }, - "component_prototypes": [], - } - - for component_name in component_names: - required_component = Prototype.objects.get( - type=ObjectType.COMPONENT, name=component_name, bundle=prototype.bundle, parent=required_service - ) - checked_objects.add(required_component) - service_prototype["component_prototypes"].append( - { - "id": required_component.pk, - "name": required_component.name, - "display_name": required_component.display_name, - "version": required_component.version, - } - ) - - if required_component.requires and required_component not in checked_objects: - get_depend_on(prototype=required_component, depend_on=depend_on, 
checked_objects=checked_objects) - - depend_on.append({"service_prototype": service_prototype}) - - if required_service.requires and required_service not in checked_objects: - get_depend_on(prototype=required_service, depend_on=depend_on, checked_objects=checked_objects) - - return depend_on diff --git a/python/api_v2/cluster/views.py b/python/api_v2/cluster/views.py index 972c705f8a..b8afafab42 100644 --- a/python/api_v2/cluster/views.py +++ b/python/api_v2/cluster/views.py @@ -33,6 +33,7 @@ from cm.errors import AdcmEx from cm.models import ( AnsibleConfig, + Bundle, Cluster, ClusterObject, ConcernType, @@ -42,6 +43,7 @@ Prototype, ServiceComponent, ) +from cm.services.bundle import retrieve_bundle_restrictions from cm.services.cluster import ( perform_host_to_cluster_map, retrieve_cluster_topology, @@ -49,12 +51,15 @@ ) from cm.services.mapping import change_host_component_mapping from cm.services.status import notify +from core.bundle.operations import build_requires_dependencies_map from core.cluster.errors import HostAlreadyBoundError, HostBelongsToAnotherClusterError, HostDoesNotExistError from core.cluster.operations import ( calculate_maintenance_mode_for_cluster_objects, ) from core.cluster.types import HostComponentEntry, MaintenanceModeOfObjects +from core.types import ComponentNameKey, ServiceNameKey from django.contrib.contenttypes.models import ContentType +from django.db.models import Q from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from guardian.mixins import PermissionListMixin from guardian.shortcuts import get_objects_for_user @@ -73,6 +78,7 @@ ) from api_v2.api_schema import DefaultParams, responses +from api_v2.cluster.depend_on import prepare_depend_on_hierarchy, retrieve_serialized_depend_on_hierarchy from api_v2.cluster.filters import ( ClusterFilter, ClusterHostFilter, @@ -86,13 +92,13 @@ ClusterHostStatusSerializer, ClusterSerializer, ClusterUpdateSerializer, + ComponentMappingSerializer, 
MappingSerializer, RelatedHostsStatusesSerializer, RelatedServicesStatusesSerializer, ServicePrototypeSerializer, SetMappingSerializer, ) -from api_v2.component.serializers import ComponentMappingSerializer from api_v2.generic.action.api_schema import document_action_viewset from api_v2.generic.action.audit import audit_action_viewset from api_v2.generic.action.views import ActionViewSet @@ -320,22 +326,43 @@ def destroy(self, request, *args, **kwargs): # noqa: ARG002 @action(methods=["get"], detail=True, url_path="service-prototypes", pagination_class=None) def service_prototypes(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 cluster = self.get_object() - prototypes = Prototype.objects.filter(type=ObjectType.SERVICE, bundle=cluster.prototype.bundle).order_by( - "display_name" - ) - serializer = self.get_serializer_class()(instance=prototypes, many=True) - - return Response(data=serializer.data) + return self._respond_with_prototypes(cluster_prototype_id=cluster.prototype_id) @action(methods=["get"], detail=True, url_path="service-candidates", pagination_class=None) def service_candidates(self, request: Request, *args, **kwargs) -> Response: # noqa: ARG002 cluster = self.get_object() - prototypes = ( - Prototype.objects.filter(type=ObjectType.SERVICE, bundle=cluster.prototype.bundle) - .exclude(id__in=cluster.clusterobject_set.all().values_list("prototype", flat=True)) + exclude_added_service_prototypes = Q( + id__in=ClusterObject.objects.values_list("prototype_id", flat=True).filter(cluster_id=cluster.id) + ) + return self._respond_with_prototypes( + cluster_prototype_id=cluster.prototype_id, exclude_clause=exclude_added_service_prototypes + ) + + def _respond_with_prototypes(self, cluster_prototype_id: int, exclude_clause: Q | None = None) -> Response: + exclude_clause = exclude_clause or Q() + bundle_id = Prototype.objects.values_list("bundle_id", flat=True).get(id=cluster_prototype_id) + + prototypes = tuple( + 
Prototype.objects.filter(type=ObjectType.SERVICE, bundle_id=bundle_id) + .exclude(exclude_clause) .order_by("display_name") ) - serializer = self.get_serializer_class()(instance=prototypes, many=True) + + context = {"depend_on": {}} + + if any(proto.requires for proto in prototypes): + requires_dependencies = build_requires_dependencies_map(retrieve_bundle_restrictions(bundle_id)) + bundle_hash = Bundle.objects.values_list("hash", flat=True).get(id=bundle_id) + context["depend_on"] = retrieve_serialized_depend_on_hierarchy( + hierarchy=prepare_depend_on_hierarchy( + dependencies=requires_dependencies, + targets=((proto.id, ServiceNameKey(service=proto.name)) for proto in prototypes), + ), + bundle_id=bundle_id, + bundle_hash=bundle_hash, + ) + + serializer = self.get_serializer_class()(instance=prototypes, many=True, context=context) return Response(data=serializer.data) @@ -480,7 +507,7 @@ def mapping_hosts(self, request: Request, *args, **kwargs) -> Response: # noqa: def mapping_components(self, request: Request, *args, **kwargs): # noqa: ARG002 cluster = self.get_object() - is_mm_available = Prototype.objects.values_list("allow_maintenance_mode", flat=True).get( + bundle_id, is_mm_available = Prototype.objects.values_list("bundle_id", "allow_maintenance_mode").get( id=cluster.prototype_id ) @@ -493,16 +520,36 @@ def mapping_components(self, request: Request, *args, **kwargs): # noqa: ARG002 else MaintenanceModeOfObjects(services={}, components={}, hosts={}) ) - serializer = self.get_serializer( - instance=( - ServiceComponent.objects.filter(cluster=cluster) - .select_related("prototype", "service__prototype") - .order_by("pk") - ), - many=True, - context={"mm": objects_mm, "is_mm_available": is_mm_available}, + components = tuple( + ServiceComponent.objects.filter(cluster=cluster) + .select_related("prototype", "prototype__parent", "service__prototype") + .order_by("pk") ) + context = {"mm": objects_mm, "is_mm_available": is_mm_available, "depend_on": {}} + + if 
any(component.prototype.requires for component in components): + requires_dependencies = build_requires_dependencies_map(retrieve_bundle_restrictions(bundle_id)) + bundle_hash = Bundle.objects.values_list("hash", flat=True).get(id=bundle_id) + context["depend_on"] = retrieve_serialized_depend_on_hierarchy( + hierarchy=prepare_depend_on_hierarchy( + dependencies=requires_dependencies, + targets=( + ( + component.id, + ComponentNameKey( + service=component.prototype.parent.name, component=component.prototype.name + ), + ) + for component in components + ), + ), + bundle_id=bundle_id, + bundle_hash=bundle_hash, + ) + + serializer = self.get_serializer(instance=components, many=True, context=context) + return Response(status=HTTP_200_OK, data=serializer.data) @extend_schema( diff --git a/python/api_v2/component/serializers.py b/python/api_v2/component/serializers.py index 1958880a91..e45dd02514 100644 --- a/python/api_v2/component/serializers.py +++ b/python/api_v2/component/serializers.py @@ -13,62 +13,20 @@ from cm.adcm_config.config import get_main_info from cm.models import Host, HostComponent, MaintenanceMode, ServiceComponent from drf_spectacular.utils import extend_schema_field -from rest_framework.fields import BooleanField from rest_framework.serializers import ( CharField, ChoiceField, IntegerField, - ListField, ModelSerializer, SerializerMethodField, ) from api_v2.cluster.serializers import ClusterRelatedSerializer -from api_v2.cluster.utils import get_depend_on from api_v2.concern.serializers import ConcernSerializer from api_v2.host.serializers import HostShortSerializer from api_v2.prototype.serializers import PrototypeRelatedSerializer -from api_v2.serializers import DependOnSerializer, WithStatusSerializer -from api_v2.service.serializers import ServiceNameSerializer, ServiceRelatedSerializer - - -class ComponentMappingSerializer(ModelSerializer): - service = ServiceNameSerializer(read_only=True) - depend_on = SerializerMethodField() - constraints = 
ListField(source="constraint") - prototype = PrototypeRelatedSerializer(read_only=True) - maintenance_mode = SerializerMethodField() - is_maintenance_mode_available = SerializerMethodField() - - class Meta: - model = ServiceComponent - fields = [ - "id", - "name", - "display_name", - "is_maintenance_mode_available", - "maintenance_mode", - "constraints", - "prototype", - "depend_on", - "service", - ] - - @staticmethod - @extend_schema_field(field=DependOnSerializer(many=True)) - def get_depend_on(instance: ServiceComponent) -> list[dict] | None: - if instance.prototype.requires: - return get_depend_on(prototype=instance.prototype) - - return None - - @extend_schema_field(field=ChoiceField(choices=(MaintenanceMode.ON.value, MaintenanceMode.OFF.value))) - def get_maintenance_mode(self, instance: ServiceComponent): - return self.context["mm"].components.get(instance.id, MaintenanceMode.OFF).value - - @extend_schema_field(field=BooleanField()) - def get_is_maintenance_mode_available(self, _instance: ServiceComponent): - return self.context["is_mm_available"] +from api_v2.serializers import WithStatusSerializer +from api_v2.service.serializers import ServiceRelatedSerializer class ComponentSerializer(WithStatusSerializer): diff --git a/python/api_v2/service/serializers.py b/python/api_v2/service/serializers.py index 1a8ad61001..2803cec637 100644 --- a/python/api_v2/service/serializers.py +++ b/python/api_v2/service/serializers.py @@ -73,14 +73,6 @@ class Meta: fields = ["maintenance_mode"] -class ServiceNameSerializer(ModelSerializer): - prototype = PrototypeRelatedSerializer(read_only=True) - - class Meta: - model = ClusterObject - fields = ["id", "name", "display_name", "state", "prototype"] - - class RelatedComponentsStatusesSerializer(WithStatusSerializer): class Meta: model = ServiceComponent diff --git a/python/api_v2/tests/bundles/complex_dependencies/config.yaml b/python/api_v2/tests/bundles/complex_dependencies/config.yaml new file mode 100644 index 
0000000000..aea66f8552 --- /dev/null +++ b/python/api_v2/tests/bundles/complex_dependencies/config.yaml @@ -0,0 +1,52 @@ +- type: cluster + name: cluster_dep + version: 1.0 + + config: &config + - name: string_param + type: string + default: some_default_value + + - name: int_param + type: integer + display_name: Some Int + default: 12 + +- type: service + name: first_service + flag_autogeneration: + enable_outdated_config: True + version: 1.5 + config: *config + + components: + first_component: + flag_autogeneration: + enable_outdated_config: True + config: *config + + second_component: + flag_autogeneration: + enable_outdated_config: False + config: *config + +- type: service + name: second_service + requires: + - service: first_service + component: first_component + flag_autogeneration: + enable_outdated_config: False + version: 1.2 + config: *config + + components: + first_component: + flag_autogeneration: + enable_outdated_config: False + config: *config + + second_component: + flag_autogeneration: + enable_outdated_config: False + config: *config diff --git a/python/api_v2/tests/test_cluster.py b/python/api_v2/tests/test_cluster.py index 5bfcc79d6d..8dabe828e2 100644 --- a/python/api_v2/tests/test_cluster.py +++ b/python/api_v2/tests/test_cluster.py @@ -341,6 +341,43 @@ def test_service_candidates_success(self): ], ) + def test_depends_on_in_service_candidates(self) -> None: + self.maxDiff = None + + bundle = self.add_bundle(self.test_bundles_dir / "complex_dependencies") + cluster = self.add_cluster(bundle=bundle, name="With Deps") + service_proto = Prototype.objects.get(name="first_service", type="service") + component_proto = Prototype.objects.get(name="first_component", type="component", parent=service_proto) + + candidates = self.client.v2[cluster, "service-candidates"].get().json() + depend_on = {entry["name"]: entry["dependOn"] for entry in candidates} + + self.assertDictEqual( + depend_on, + { + "first_service": None, + "second_service": [ + { + 
"servicePrototype": { + "id": service_proto.id, + "name": "first_service", + "displayName": "first_service", + "version": "1.5", + "license": {"status": "absent", "text": None}, + "componentPrototypes": [ + { + "id": component_proto.id, + "name": "first_component", + "displayName": "first_component", + "version": "1.5", + } + ], + } + } + ], + }, + ) + def test_service_create_success(self): service_prototype = Prototype.objects.filter(type="service").first() response = (self.client.v2[self.cluster_1] / "services").post(data=[{"prototype_id": service_prototype.pk}]) diff --git a/python/api_v2/tests/test_mapping.py b/python/api_v2/tests/test_mapping.py index eb22f440a6..a6effd83a2 100644 --- a/python/api_v2/tests/test_mapping.py +++ b/python/api_v2/tests/test_mapping.py @@ -33,7 +33,6 @@ HTTP_409_CONFLICT, ) -from api_v2.cluster.utils import get_requires from api_v2.tests.base import BaseAPITestCase @@ -231,13 +230,6 @@ def test_mapping_components_with_requires_success(self): self.assertEqual(len(component.prototype.requires), len(component_data["dependOn"])) - def test_get_requires(self): - requires = [{"service": "service1", "component": "component1"}, {"service": "service1"}] - - new_requires = get_requires(requires=requires) - - self.assertDictEqual(new_requires, {"service1": ["component1"]}) - class TestMappingConstraints(BaseAPITestCase): def setUp(self) -> None: diff --git a/python/cm/services/bundle.py b/python/cm/services/bundle.py index 7a2960b7bb..67596ea846 100644 --- a/python/cm/services/bundle.py +++ b/python/cm/services/bundle.py @@ -14,16 +14,15 @@ from collections import defaultdict, deque from pathlib import Path -from core.concern.checks import parse_constraint -from core.concern.types import ( +from core.bundle.types import ( BundleRestrictions, - ComponentNameKey, ComponentRestrictionOwner, MappingRestrictions, ServiceDependencies, ServiceRestrictionOwner, ) -from core.types import BundleID +from core.concern.checks import parse_constraint +from 
core.types import BundleID, ComponentNameKey from django.conf import settings from cm.models import ObjectType, Prototype @@ -147,7 +146,7 @@ def retrieve_bundle_restrictions(bundle_id: BundleID) -> BundleRestrictions: if not requires: continue - key = ServiceRestrictionOwner(name=service_name) + key = ServiceRestrictionOwner(service=service_name) for requirement in requires: required_service_name = requirement["service"] diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index abd5331752..a31dd4dc91 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -14,15 +14,9 @@ from operator import attrgetter from typing import Iterable, Literal, NamedTuple, TypeAlias +from core.bundle.types import BundleRestrictions, MappingRestrictions, ServiceDependencies from core.cluster.types import ClusterTopology from core.concern.checks import find_cluster_mapping_issues, find_unsatisfied_service_requirements -from core.concern.types import ( - BundleRestrictions, - ComponentRestrictionOwner, - MappingRestrictions, - ServiceDependencies, - ServiceRestrictionOwner, -) from core.converters import named_mapping_from_topology from core.types import ClusterID, ConfigID, ObjectID from django.db.models import Q @@ -136,9 +130,7 @@ def service_requirements_has_issue(service: ClusterObject) -> HasIssue: service_name = service.prototype.name service_related_restrictions = {} for key, required_services in bundle_restrictions.service_requires.items(): - if (isinstance(key, ServiceRestrictionOwner) and key.name == service_name) or ( - isinstance(key, ComponentRestrictionOwner) and key.service == service_name - ): + if key.service == service_name: service_related_restrictions[key] = required_services return bool( diff --git a/python/cm/services/mapping.py b/python/cm/services/mapping.py index 909805fa6f..55df167274 100644 --- a/python/cm/services/mapping.py +++ b/python/cm/services/mapping.py @@ -12,9 +12,9 @@ from 
typing import Iterable, Protocol +from core.bundle.types import BundleRestrictions from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference from core.cluster.types import ClusterTopology, HostComponentEntry, TopologyHostDiff -from core.concern.types import BundleRestrictions from core.types import ADCMCoreType, BundleID, ClusterID, CoreObjectDescriptor, HostID from django.contrib.contenttypes.models import ContentType from django.db.transaction import atomic diff --git a/python/core/bundle/__init__.py b/python/core/bundle/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/core/bundle/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/core/bundle/operations.py b/python/core/bundle/operations.py new file mode 100644 index 0000000000..3e33a58c92 --- /dev/null +++ b/python/core/bundle/operations.py @@ -0,0 +1,68 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import defaultdict +from typing import TypeAlias + +from core.bundle.types import BundleRestrictions +from core.types import ComponentNameKey, ServiceNameKey + +_DependencyMap: TypeAlias = dict[ServiceNameKey | ComponentNameKey, set[ServiceNameKey | ComponentNameKey]] + + +class RequiresDependencies: + __slots__ = ("_direct", "_full") + + def __init__(self, direct_dependencies: _DependencyMap) -> None: + self._direct = direct_dependencies + self._full: _DependencyMap = {} + + def __getitem__(self, item: ServiceNameKey | ComponentNameKey) -> set[ServiceNameKey | ComponentNameKey]: + if item not in self._direct: + return set() + + if all_dependencies := self._full.get(item): + return all_dependencies + + all_dependencies = self._resolve_full_dependencies(key=item, processed=set()) + self._full[item] = all_dependencies + + return all_dependencies + + def _resolve_full_dependencies( + self, key: ServiceNameKey | ComponentNameKey, processed: set[ServiceNameKey | ComponentNameKey] + ) -> set[ServiceNameKey | ComponentNameKey]: + dependencies = set() + + for dependency in self._direct.get(key, ()): + dependencies.add(dependency) + + if dependency not in processed: + processed.add(dependency) + dependencies |= self._resolve_full_dependencies(key=key, processed=processed) + + return dependencies - {key} + + +def build_requires_dependencies_map(bundle_restrictions: BundleRestrictions) -> RequiresDependencies: + requires: _DependencyMap = defaultdict(set) + + for dependant_object, required_service_names in bundle_restrictions.service_requires.items(): + requires[dependant_object].update(map(ServiceNameKey, required_service_names)) + + for dependant_object, required_components in bundle_restrictions.mapping.required_components.items(): + requires[dependant_object].update(required_components) + + for dependant_component, required_service_names in bundle_restrictions.mapping.required_services.items(): + requires[dependant_component].update(map(ServiceNameKey, 
required_service_names)) + + return RequiresDependencies(direct_dependencies=requires) diff --git a/python/core/concern/types.py b/python/core/bundle/types.py similarity index 85% rename from python/core/concern/types.py rename to python/core/bundle/types.py index f6b9d004e8..b84966bb55 100644 --- a/python/core/concern/types.py +++ b/python/core/bundle/types.py @@ -15,27 +15,12 @@ from enum import Enum from typing import Callable, NamedTuple, TypeAlias -from core.types import ComponentName, ServiceName - - -class ComponentNameKey(NamedTuple): - service: ServiceName - component: ComponentName - - def __str__(self) -> str: - return f'component "{self.component}" of service "{self.service}"' - +from core.types import ComponentNameKey, ServiceName, ServiceNameKey +ServiceRestrictionOwner: TypeAlias = ServiceNameKey ComponentRestrictionOwner: TypeAlias = ComponentNameKey -class ServiceRestrictionOwner(NamedTuple): - name: ServiceName - - def __str__(self) -> str: - return f'service "{self.name}"' - - class MappingRestrictionType(Enum): CONSTRAINT = "constraint" REQUIRES = "requires" diff --git a/python/core/concern/checks/_mapping.py b/python/core/concern/checks/_mapping.py index 704698297d..84f59519b5 100644 --- a/python/core/concern/checks/_mapping.py +++ b/python/core/concern/checks/_mapping.py @@ -14,9 +14,7 @@ from functools import partial from typing import Iterable -from core.cluster.types import NamedMapping -from core.concern.types import ( - ComponentNameKey, +from core.bundle.types import ( ComponentRestrictionOwner, Constraint, HostsAmount, @@ -26,7 +24,8 @@ ServiceRestrictionOwner, SupportedConstraintFormat, ) -from core.types import HostID +from core.cluster.types import NamedMapping +from core.types import ComponentNameKey, HostID def find_cluster_mapping_issues( @@ -70,7 +69,7 @@ def find_cluster_mapping_issues( # 1. Service added to cluster # 2. 
At least one component of this service should be mapped - at_least_one_mapped = any(named_mapping.get(dependant_object.name, {}).values()) + at_least_one_mapped = any(named_mapping.get(dependant_object.service, {}).values()) if not at_least_one_mapped: continue diff --git a/python/core/concern/checks/_service_requires.py b/python/core/concern/checks/_service_requires.py index 965728839e..dfdf18c673 100644 --- a/python/core/concern/checks/_service_requires.py +++ b/python/core/concern/checks/_service_requires.py @@ -12,13 +12,13 @@ from collections import deque -from core.cluster.types import NamedMapping -from core.concern.types import ( +from core.bundle.types import ( ComponentRestrictionOwner, MissingServiceRequiresViolation, ServiceDependencies, ServiceRestrictionOwner, ) +from core.cluster.types import NamedMapping def find_unsatisfied_service_requirements( @@ -36,7 +36,9 @@ def find_unsatisfied_service_requirements( if ( isinstance(dependant_object, ComponentRestrictionOwner) and dependant_object.component not in named_mapping.get(dependant_object.service, ()) - ) or (isinstance(dependant_object, ServiceRestrictionOwner) and dependant_object.name not in existing_services): + ) or ( + isinstance(dependant_object, ServiceRestrictionOwner) and dependant_object.service not in existing_services + ): continue if not_found_services := requires - existing_services: diff --git a/python/core/types.py b/python/core/types.py index e568ba28d1..ec4a62eac8 100644 --- a/python/core/types.py +++ b/python/core/types.py @@ -115,3 +115,18 @@ class NamedCoreObjectWithPrototype(NamedTuple): prototype_id: PrototypeID type: ADCMCoreType name: str + + +class ServiceNameKey(NamedTuple): + service: ServiceName + + def __str__(self) -> str: + return f'service "{self.service}"' + + +class ComponentNameKey(NamedTuple): + service: ServiceName + component: ComponentName + + def __str__(self) -> str: + return f'component "{self.component}" of service "{self.service}"' From 
ed4b49fa7f38e7a65d7934495d800d9fdc83c514 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 1 Oct 2024 07:35:32 +0000 Subject: [PATCH 80/98] ADCM-5997 Send concern create/delete notifications on redistribution --- pyproject.toml | 3 +- .../ansible_plugin/executors/change_flag.py | 5 +- .../executors/change_maintenance_mode.py | 7 - python/api_v2/service/utils.py | 59 ++++--- python/cm/api.py | 64 +++++-- python/cm/services/cluster.py | 8 +- python/cm/services/concern/distribution.py | 158 +++++++++++++----- python/cm/services/concern/flags.py | 16 +- python/cm/services/mapping.py | 20 ++- python/cm/status_api.py | 60 ++++++- python/cm/upgrade.py | 37 +++- 11 files changed, 321 insertions(+), 116 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3865f062f7..9720f7bfd1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -161,7 +161,8 @@ pythonVersion = "3.10" pythonPlatform = "Linux" executionEnvironments = [ - { root = "conf/adcm/python_scripts", extraPaths = [ "python" ]}, + { root = "python" }, + { root = "conf/adcm/python_scripts", extraPaths = [ "python" ] }, ] # TODO: Remove. 
Temporarily disable type issue checks diff --git a/python/ansible_plugin/executors/change_flag.py b/python/ansible_plugin/executors/change_flag.py index fa7e607119..4cf5157b7b 100644 --- a/python/ansible_plugin/executors/change_flag.py +++ b/python/ansible_plugin/executors/change_flag.py @@ -21,6 +21,7 @@ raise_flag, update_hierarchy_for_flag, ) +from cm.status_api import notify_about_redistributed_concerns_from_maps from core.types import ADCMCoreType, CoreObjectDescriptor from django.db.transaction import atomic from pydantic import field_validator @@ -119,7 +120,9 @@ def __call__( flag = ConcernFlag(name=arguments.name.lower(), message=arguments.msg, cause=None) changed = raise_flag(flag=flag, on_objects=targets) - update_hierarchy_for_flag(flag=flag, on_objects=targets) + if changed: + added = update_hierarchy_for_flag(flag=flag, on_objects=targets) + notify_about_redistributed_concerns_from_maps(added=added, removed={}) case ChangeFlagOperation.DOWN: if arguments.name: changed = lower_flag(name=arguments.name.lower(), on_objects=targets) diff --git a/python/ansible_plugin/executors/change_maintenance_mode.py b/python/ansible_plugin/executors/change_maintenance_mode.py index e5c272a777..2f36afac4d 100644 --- a/python/ansible_plugin/executors/change_maintenance_mode.py +++ b/python/ansible_plugin/executors/change_maintenance_mode.py @@ -14,8 +14,6 @@ from typing import Any, Collection from cm.models import Host, MaintenanceMode -from cm.services.cluster import retrieve_cluster_topology -from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_objects_in_mm from cm.status_api import send_object_update_event from core.types import ADCMCoreType, CoreObjectDescriptor @@ -97,11 +95,6 @@ def __call__( update_fields=["maintenance_mode"] if isinstance(target_object, Host) else ["_maintenance_mode"] ) - if not value: - # In terms of concerns CHANGING and ON is the same, - # so recalculation is required only 
for turning it OFF - redistribute_issues_and_flags(topology=retrieve_cluster_topology(target_object.cluster_id)) - with suppress(Exception): send_object_update_event(object_=target_object, changes={"maintenanceMode": target_object.maintenance_mode}) diff --git a/python/api_v2/service/utils.py b/python/api_v2/service/utils.py index a0933cc412..4417b37b75 100644 --- a/python/api_v2/service/utils.py +++ b/python/api_v2/service/utils.py @@ -28,41 +28,46 @@ from cm.services.concern.cases import recalculate_own_concerns_on_add_services from cm.services.concern.distribution import redistribute_issues_and_flags from cm.services.status.notify import reset_hc_map +from cm.status_api import notify_about_redistributed_concerns_from_maps from django.db import connection, transaction from django.db.models import QuerySet from rbac.models import re_apply_object_policy -@transaction.atomic def bulk_add_services_to_cluster(cluster: Cluster, prototypes: QuerySet[Prototype]) -> QuerySet[ClusterObject]: - ClusterObject.objects.bulk_create(objs=[ClusterObject(cluster=cluster, prototype=proto) for proto in prototypes]) - services = ClusterObject.objects.filter(cluster=cluster, prototype__in=prototypes).select_related("prototype") - bulk_init_config(objects=services) - - service_proto_service_map = {service.prototype.pk: service for service in services} - ServiceComponent.objects.bulk_create( - objs=[ - ServiceComponent( - cluster=cluster, service=service_proto_service_map[prototype.parent.pk], prototype=prototype - ) - for prototype in Prototype.objects.filter(type=ObjectType.COMPONENT, parent__in=prototypes).select_related( - "parent" - ) - ] - ) - components = ServiceComponent.objects.filter(cluster=cluster, service__in=services).select_related("prototype") - bulk_init_config(objects=components) - - recalculate_own_concerns_on_add_services( - cluster=cluster, - services=services.prefetch_related( - "servicecomponent_set" - ).all(), # refresh values from db to update `config` field - ) 
- redistribute_issues_and_flags(topology=retrieve_cluster_topology(cluster.pk)) + with transaction.atomic(): + ClusterObject.objects.bulk_create( + objs=[ClusterObject(cluster=cluster, prototype=proto) for proto in prototypes] + ) + services = ClusterObject.objects.filter(cluster=cluster, prototype__in=prototypes).select_related("prototype") + bulk_init_config(objects=services) + + service_proto_service_map = {service.prototype.pk: service for service in services} + ServiceComponent.objects.bulk_create( + objs=[ + ServiceComponent( + cluster=cluster, service=service_proto_service_map[prototype.parent.pk], prototype=prototype + ) + for prototype in Prototype.objects.filter( + type=ObjectType.COMPONENT, parent__in=prototypes + ).select_related("parent") + ] + ) + components = ServiceComponent.objects.filter(cluster=cluster, service__in=services).select_related("prototype") + bulk_init_config(objects=components) + + recalculate_own_concerns_on_add_services( + cluster=cluster, + services=services.prefetch_related( + "servicecomponent_set" + ).all(), # refresh values from db to update `config` field + ) + added, removed = redistribute_issues_and_flags(topology=retrieve_cluster_topology(cluster.pk)) + + re_apply_object_policy(apply_object=cluster) - re_apply_object_policy(apply_object=cluster) reset_hc_map() + notify_about_redistributed_concerns_from_maps(added=added, removed=removed) return services diff --git a/python/cm/api.py b/python/cm/api.py index 9719632f65..1bcd59baa5 100644 --- a/python/cm/api.py +++ b/python/cm/api.py @@ -16,7 +16,7 @@ import json from adcm_version import compare_prototype_versions -from core.types import ADCMCoreType, CoreObjectDescriptor +from core.types import ADCMCoreType, ConcernID, CoreObjectDescriptor from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.exceptions import MultipleObjectsReturned @@ -77,11 +77,17 @@ object_configuration_has_issue, object_imports_has_issue, ) -from 
cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags +from cm.services.concern.distribution import ( + ConcernRelatedObjects, + distribute_concern_on_related_objects, + redistribute_issues_and_flags, +) from cm.services.concern.flags import BuiltInFlag, raise_flag from cm.services.concern.locks import get_lock_on_object from cm.services.status.notify import reset_hc_map, reset_objects_in_mm from cm.status_api import ( + notify_about_new_concern, + notify_about_redistributed_concerns_from_maps, send_config_creation_event, send_delete_service_event, ) @@ -139,10 +145,12 @@ def add_cluster(prototype: Prototype, name: str, description: str = "") -> Clust object_type=ContentType.objects.get_for_model(Cluster), ) - if recalculate_own_concerns_on_add_clusters(cluster): # TODO: redistribute only new issues. See ADCM-5798 - redistribute_issues_and_flags(topology=retrieve_cluster_topology(cluster.pk)) + added, removed = {}, {} + if recalculate_own_concerns_on_add_clusters(cluster): + added, removed = redistribute_issues_and_flags(topology=retrieve_cluster_topology(cluster.pk)) reset_hc_map() + notify_about_redistributed_concerns_from_maps(added=added, removed=removed) logger.info("cluster #%s %s is added", cluster.pk, cluster.name) @@ -168,10 +176,12 @@ def add_host(prototype: Prototype, provider: HostProvider, fqdn: str, descriptio host.save() add_concern_to_object(object_=host, concern=get_lock_on_object(object_=provider)) - if concerns := recalculate_own_concerns_on_add_hosts(host): # TODO: redistribute only new issues. 
See ADCM-5798 - distribute_concern_on_related_objects( - owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), - concern_id=next(iter(concerns[ADCMCoreType.HOST][host.id])), + related_objects = {} + concern_id = None + if concerns := recalculate_own_concerns_on_add_hosts(host): + concern_id = next(iter(concerns[ADCMCoreType.HOST][host.id])) + related_objects = distribute_concern_on_related_objects( + owner=CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST), concern_id=concern_id ) if concern := retrieve_issue( owner=CoreObjectDescriptor(id=provider.id, type=ADCMCoreType.HOSTPROVIDER), cause=ConcernCause.CONFIG @@ -181,6 +191,10 @@ def add_host(prototype: Prototype, provider: HostProvider, fqdn: str, descriptio re_apply_object_policy(provider) reset_hc_map() + + if concern_id: + notify_about_new_concern(concern_id=concern_id, related_objects=related_objects) + logger.info("host #%s %s is added", host.pk, host.fqdn) return host @@ -198,9 +212,14 @@ def add_host_provider(prototype: Prototype, name: str, description: str = ""): provider.save() provider_cod = CoreObjectDescriptor(id=provider.id, type=ADCMCoreType.HOSTPROVIDER) + concern_id = None if object_configuration_has_issue(provider): concern = create_issue(owner=provider_cod, cause=ConcernCause.CONFIG) - distribute_concern_on_related_objects(owner=provider_cod, concern_id=concern.id) + concern_id = concern.id + related_objects = distribute_concern_on_related_objects(owner=provider_cod, concern_id=concern_id) + + if concern_id: + notify_about_new_concern(concern_id=concern_id, related_objects=related_objects) logger.info("host provider #%s %s is added", provider.pk, provider.name) @@ -260,13 +279,16 @@ def delete_service(service: ClusterObject) -> None: cluster = service.cluster cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) + concern_id = None + related_objects = {} if not cluster_mapping_has_issue_orm_version(cluster=cluster): delete_issue( 
owner=CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER), cause=ConcernCause.HOSTCOMPONENT ) elif retrieve_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) is None: concern = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) - distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern.id) + concern_id = concern.id + related_objects = distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern_id) keep_objects = defaultdict(set) for task in TaskLog.objects.filter( @@ -282,6 +304,8 @@ def delete_service(service: ClusterObject) -> None: reset_hc_map() on_commit(func=partial(send_delete_service_event, service_id=service_pk)) + if concern_id: + on_commit(func=partial(notify_about_new_concern, concern_id=concern_id, related_objects=related_objects)) logger.info("service #%s is deleted", service_pk) @@ -387,11 +411,12 @@ def add_service_to_cluster(cluster: Cluster, proto: Prototype) -> ClusterObject: add_components_to_service(cluster=cluster, service=service) recalculate_own_concerns_on_add_services(cluster=cluster, services=(service,)) - redistribute_issues_and_flags(retrieve_cluster_topology(cluster.id)) + added, removed = redistribute_issues_and_flags(retrieve_cluster_topology(cluster.id)) re_apply_object_policy(apply_object=cluster) reset_hc_map() + notify_about_redistributed_concerns_from_maps(added=added, removed=removed) logger.info( "service #%s %s is added to cluster #%s %s", service.pk, @@ -459,6 +484,8 @@ def update_obj_config(obj_conf: ObjectConfig, config: dict, attr: dict, descript current_attr=old_conf.attr, ) + concern_id, related_objects = None, {} + with atomic(): config_log = save_object_config(object_config=obj_conf, config=new_conf, attr=attr, description=description) @@ -468,17 +495,19 @@ def update_obj_config(obj_conf: ObjectConfig, config: dict, attr: dict, descript ) # flag on ADCM can't be raised (only objects of `ADCMCoreType` are supported) if not isinstance(obj, ADCM): - 
raise_outdated_config_flag_if_required(object_=obj) + concern_id, related_objects = raise_outdated_config_flag_if_required(object_=obj) apply_policy_for_new_config(config_object=obj, config_log=config_log) send_config_creation_event(object_=obj) + if concern_id: + notify_about_new_concern(concern_id=concern_id, related_objects=related_objects) return config_log -def raise_outdated_config_flag_if_required(object_: MainObject): +def raise_outdated_config_flag_if_required(object_: MainObject) -> tuple[ConcernID | None, ConcernRelatedObjects]: if object_.state == "created" or not object_.prototype.flag_autogeneration.get("enable_outdated_config", False): - return + return None, {} flag = BuiltInFlag.ADCM_OUTDATED_CONFIG.value flag_exists = object_.concerns.filter( @@ -491,7 +520,9 @@ def raise_outdated_config_flag_if_required(object_: MainObject): concern_id = ConcernItem.objects.values_list("id", flat=True).get( name=flag.name, type=ConcernType.FLAG, owner_id=object_.id, owner_type=object_.content_type ) - distribute_concern_on_related_objects(owner=owner, concern_id=concern_id) + return concern_id, distribute_concern_on_related_objects(owner=owner, concern_id=concern_id) + + return None, {} def set_object_config_with_plugin(obj: ADCMEntity, config: dict, attr: dict) -> ConfigLog: @@ -792,7 +823,8 @@ def multi_bind(cluster: Cluster, service: ClusterObject | None, bind_list: list[ delete_issue(owner=import_target, cause=ConcernCause.IMPORT) elif retrieve_issue(owner=import_target, cause=ConcernCause.IMPORT) is None: concern = create_issue(owner=import_target, cause=ConcernCause.IMPORT) - distribute_concern_on_related_objects(owner=import_target, concern_id=concern.id) + related_objects = distribute_concern_on_related_objects(owner=import_target, concern_id=concern.id) + on_commit(func=partial(notify_about_new_concern, concern_id=concern.id, related_objects=related_objects)) return get_import(cluster=cluster, service=service) diff --git a/python/cm/services/cluster.py 
b/python/cm/services/cluster.py index 19c4a72b68..e35babc132 100644 --- a/python/cm/services/cluster.py +++ b/python/cm/services/cluster.py @@ -27,6 +27,7 @@ from cm.models import Cluster, ClusterObject, ConcernCause, Host, HostComponent, ServiceComponent from cm.services.concern import create_issue, delete_issue +from cm.status_api import notify_about_new_concern class ClusterDB: @@ -109,15 +110,20 @@ def perform_host_to_cluster_map( cluster = Cluster.objects.get(id=cluster_id) cluster_cod = CoreObjectDescriptor(id=cluster.id, type=ADCMCoreType.CLUSTER) + concern_id = None + related_objects = {} if not cluster_mapping_has_issue_orm_version(cluster=cluster): delete_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) elif not cluster.get_own_issue(cause=ConcernCause.HOSTCOMPONENT): concern = create_issue(owner=cluster_cod, cause=ConcernCause.HOSTCOMPONENT) - distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern.id) + concern_id = concern.id + related_objects = distribute_concern_on_related_objects(owner=cluster_cod, concern_id=concern.id) re_apply_object_policy(apply_object=cluster) status_service.reset_hc_map() + if concern_id: + notify_about_new_concern(concern_id=concern_id, related_objects=related_objects) return hosts diff --git a/python/cm/services/concern/distribution.py b/python/cm/services/concern/distribution.py index 5faf3d21d6..d9a65e3969 100644 --- a/python/cm/services/concern/distribution.py +++ b/python/cm/services/concern/distribution.py @@ -52,7 +52,14 @@ ProviderHostMap: TypeAlias = dict[HostProviderID, set[HostID]] -def redistribute_issues_and_flags(topology: ClusterTopology) -> None: +def redistribute_issues_and_flags( + topology: ClusterTopology, +) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: + """ + Calculate state of concern-object links and update state in database accordingly. + + Returns added and removed concerns. 
+ """ topology_objects: TopologyObjectMap = { ADCMCoreType.CLUSTER: (topology.cluster_id,), ADCMCoreType.SERVICE: tuple(topology.services), @@ -73,19 +80,30 @@ def redistribute_issues_and_flags(topology: ClusterTopology) -> None: if not objects_concerns: # nothing to redistribute, expected that no links will be found too - return + return {}, {} - # Step #2. Calculate new concern relations - concern_links: AffectedObjectConcernMap = _calculate_concerns_distribution_for_topology( + # Step #2. Find difference before-after in concern relations state + + # For difference, we exclude concerns that came from host/hostprovider on hosts + # to ensure they aren't deleted/added + hostprovider_hierarchy_concerns = objects_concerns.get(ADCMCoreType.HOST, {}) + full_distribution = _calculate_concerns_distribution_for_topology( topology=topology, objects_concerns=objects_concerns ) + added, removed = _find_distribution_difference( + old=_retrieve_current_concerns_distribution( + topology_objects=topology_objects, + hosts_existing_concerns=hostprovider_hierarchy_concerns, + ), + new=_remove_host_hierarchy_links_from_hosts( + concerns=full_distribution, hosts_existing_concerns=hostprovider_hierarchy_concerns + ), + ) # Step #3. 
Link objects to concerns - _relink_concerns_to_objects_in_db( - concern_links=concern_links, - topology_objects=topology_objects, - hosts_existing_concerns=objects_concerns.get(ADCMCoreType.HOST, {}), - ) + _update_db_concerns_state(added=added, removed=removed) + + return added, removed def _retrieve_concerns_of_objects_in_topology( @@ -151,50 +169,99 @@ def _calculate_concerns_distribution_for_topology( return concern_links -def _relink_concerns_to_objects_in_db( - concern_links: dict[ADCMCoreType, dict[ObjectID, set[int]]], - topology_objects: TopologyObjectMap, - hosts_existing_concerns: dict[ObjectID, set[int]], -) -> None: - # ADCMCoreType.HOST is a special case, because we really don't want to delete host/provider-related concerns +def _remove_host_hierarchy_links_from_hosts( + concerns: AffectedObjectConcernMap, hosts_existing_concerns: dict[HostID, set[ConcernID]] +) -> AffectedObjectConcernMap: + for host_id, linked_concerns in concerns.get(ADCMCoreType.HOST, {}).items(): + host_concerns = hosts_existing_concerns.get(host_id) + if host_concerns: + linked_concerns.difference_update(host_concerns) + + return concerns + + +def _retrieve_current_concerns_distribution( + topology_objects: TopologyObjectMap, hosts_existing_concerns: dict[ObjectID, set[ConcernID]] +) -> AffectedObjectConcernMap: + concern_links: AffectedObjectConcernMap = { + type_: {id_: set() for id_ in ids} for type_, ids in topology_objects.items() + } + + # ADCMCoreType.HOST is a special case, + # because we really don't want to work with own host / hostprovider concerns here for core_type in (ADCMCoreType.CLUSTER, ADCMCoreType.SERVICE, ADCMCoreType.COMPONENT): orm_model = core_type_to_model(core_type) id_field = f"{orm_model.__name__.lower()}_id" - m2m_model = orm_model.concerns.through - # Delete all concern relations for objects in question - m2m_model.objects.filter(**{f"{id_field}__in": topology_objects[core_type]}).exclude( - concernitem__type=ConcernType.LOCK - ).delete() - - # ... 
and create them again - m2m_model.objects.bulk_create( - ( - m2m_model(concernitem_id=concern_id, **{id_field: object_id}) - for object_id, concerns in concern_links[core_type].items() - for concern_id in concerns - ) - ) + for object_id, concern_id in ( + orm_model.concerns.through.objects.filter(**{f"{id_field}__in": topology_objects[core_type]}) + .exclude(concernitem__type=ConcernType.LOCK) + .values_list(id_field, "concernitem_id") + ): + concern_links[core_type][object_id].add(concern_id) # handle hosts links - m2m_model = Host.concerns.through hostprovider_hierarchy_concerns = set(chain.from_iterable(hosts_existing_concerns.values())) - # Delete all cluster/service/component related concern links, but keep host/hostprovider ones: - # thou we could recreate those concerns too, but it doesn't make much sense. - ( - m2m_model.objects.filter(host_id__in=topology_objects[ADCMCoreType.HOST]) + for object_id, concern_id in ( + Host.concerns.through.objects.filter(host_id__in=topology_objects[ADCMCoreType.HOST]) .exclude(Q(concernitem_id__in=hostprovider_hierarchy_concerns) | Q(concernitem__type=ConcernType.LOCK)) - .delete() - ) + .values_list("host_id", "concernitem_id") + ): + concern_links[ADCMCoreType.HOST][object_id].add(concern_id) - # create only cluster/service/component related concern links - m2m_model.objects.bulk_create( - ( - m2m_model(concernitem_id=concern_id, host_id=host_id) - for host_id, concerns in concern_links[ADCMCoreType.HOST].items() - for concern_id in (concerns - hostprovider_hierarchy_concerns) - ) - ) + return concern_links + + +def _find_distribution_difference( + old: AffectedObjectConcernMap, new: AffectedObjectConcernMap +) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: + """ + Based on old (current DB state) and new (concerns dist based on topology) concern relations + find which "links" (relations) are "appeared" (should be added) and "gone" (should be removed). + + Returns added and removed maps. 
+ """ + added = defaultdict(lambda: defaultdict(set)) + removed = defaultdict(lambda: defaultdict(set)) + + for core_type in (ADCMCoreType.CLUSTER, ADCMCoreType.SERVICE, ADCMCoreType.COMPONENT, ADCMCoreType.HOST): + for object_id, concern_in_old in old[core_type].items(): + concerns_in_new = new[core_type].get(object_id, set()) + added[core_type][object_id] = concerns_in_new - concern_in_old + removed[core_type][object_id] = concern_in_old - concerns_in_new + + # handle absolutely new entries + for object_id in set(new[core_type].keys()).difference(old[core_type]): + added[core_type][object_id] = new[core_type][object_id] + + return added, removed + + +def _update_db_concerns_state(added: AffectedObjectConcernMap, removed: AffectedObjectConcernMap): + for core_type in (ADCMCoreType.CLUSTER, ADCMCoreType.SERVICE, ADCMCoreType.COMPONENT, ADCMCoreType.HOST): + orm_model = core_type_to_model(core_type) + id_field = f"{orm_model.__name__.lower()}_id" + m2m_model = orm_model.concerns.through + + to_delete = removed.get(core_type) + if to_delete: + query = Q() + for object_id, concern_ids in to_delete.items(): + query |= Q(concernitem_id__in=concern_ids, **{id_field: object_id}) + + # Delete all concern relations for objects in question + m2m_model.objects.filter(query).exclude(concernitem__type=ConcernType.LOCK).delete() + + to_create = added.get(core_type) + if to_create: + # ... 
and create them again + m2m_model.objects.bulk_create( + ( + m2m_model(concernitem_id=concern_id, **{id_field: object_id}) + for object_id, concerns in to_create.items() + for concern_id in concerns + ) + ) # PUBLIC distribute_concern_on_related_objects @@ -202,9 +269,10 @@ def _relink_concerns_to_objects_in_db( ConcernRelatedObjects: TypeAlias = dict[ADCMCoreType, set[ObjectID]] -def distribute_concern_on_related_objects(owner: CoreObjectDescriptor, concern_id: ConcernID): +def distribute_concern_on_related_objects(owner: CoreObjectDescriptor, concern_id: ConcernID) -> ConcernRelatedObjects: distribution_targets = _find_concern_distribution_targets(owner=owner) _add_concern_links_to_objects_in_db(targets=distribution_targets, concern_id=concern_id) + return distribution_targets def _find_concern_distribution_targets(owner: CoreObjectDescriptor) -> ConcernRelatedObjects: diff --git a/python/cm/services/concern/flags.py b/python/cm/services/concern/flags.py index 629ca061c3..33525426a2 100644 --- a/python/cm/services/concern/flags.py +++ b/python/cm/services/concern/flags.py @@ -24,7 +24,7 @@ from cm.converters import core_type_to_model, model_name_to_core_type from cm.models import ADCMEntity, ConcernCause, ConcernItem, ConcernType -from cm.services.concern.distribution import distribute_concern_on_related_objects +from cm.services.concern.distribution import AffectedObjectConcernMap, distribute_concern_on_related_objects from cm.services.concern.messages import ( ADCM_ENTITY_AS_PLACEHOLDERS, ConcernMessage, @@ -118,15 +118,25 @@ def lower_all_flags(on_objects: Collection[CoreObjectDescriptor]) -> bool: return bool(deleted_count) -def update_hierarchy_for_flag(flag: ConcernFlag, on_objects: Collection[CoreObjectDescriptor]) -> None: +def update_hierarchy_for_flag( + flag: ConcernFlag, on_objects: Collection[CoreObjectDescriptor] +) -> AffectedObjectConcernMap: + # not all of these can be considered "affected", but there's little way to know the difference + 
processed: AffectedObjectConcernMap = defaultdict(lambda: defaultdict(set)) for concern in ConcernItem.objects.select_related("owner_type").filter( Q(name=flag.name, cause=flag.cause, type=ConcernType.FLAG) & _get_filter_for_flags_of_objects( content_type_id_map=_get_owner_ids_grouped_by_content_type(objects=on_objects) ) ): + concern_id = concern.id owner = CoreObjectDescriptor(id=concern.owner_id, type=model_name_to_core_type(concern.owner_type.model)) - distribute_concern_on_related_objects(owner=owner, concern_id=concern.id) + related_objects = distribute_concern_on_related_objects(owner=owner, concern_id=concern.id) + for core_type, object_ids in related_objects.items(): + for object_id in object_ids: + processed[core_type][object_id].add(concern_id) + + return processed def _get_filter_for_flags_of_objects(content_type_id_map: dict[ContentType, set[int]]) -> Q: diff --git a/python/cm/services/mapping.py b/python/cm/services/mapping.py index 909805fa6f..ac0f9cc951 100644 --- a/python/cm/services/mapping.py +++ b/python/cm/services/mapping.py @@ -32,11 +32,16 @@ check_service_requirements, cluster_mapping_has_issue, ) -from cm.services.concern.distribution import lock_objects, redistribute_issues_and_flags, unlock_objects +from cm.services.concern.distribution import ( + AffectedObjectConcernMap, + lock_objects, + redistribute_issues_and_flags, + unlock_objects, +) from cm.services.concern.locks import retrieve_lock_on_object from cm.services.group_config import ConfigHostGroupRepo from cm.services.status.notify import reset_hc_map, reset_objects_in_mm -from cm.status_api import send_host_component_map_update_event +from cm.status_api import notify_about_redistributed_concerns_from_maps, send_host_component_map_update_event class PerformMappingChecks(Protocol): @@ -100,7 +105,7 @@ def change_host_component_mapping( _recreate_mapping_in_db(topology=new_topology) # updates of related entities - _update_concerns( + added, removed = _update_concerns( 
old_topology=current_topology, new_topology=new_topology, bundle_restrictions=bundle_restrictions ) ActionHostGroupRepo().remove_unmapped_hosts_from_groups(host_difference.unmapped) @@ -111,6 +116,7 @@ def change_host_component_mapping( reset_hc_map() reset_objects_in_mm() send_host_component_map_update_event(cluster_id=cluster_id) + notify_about_redistributed_concerns_from_maps(added=added, removed=removed) return new_topology @@ -163,7 +169,7 @@ def _recreate_mapping_in_db(topology: ClusterTopology) -> None: def _update_concerns( old_topology: ClusterTopology, new_topology: ClusterTopology, bundle_restrictions: BundleRestrictions -) -> None: +) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: # todo HC may break (?) # We can't be sure this method is called after some sort of "check" cluster = CoreObjectDescriptor(id=old_topology.cluster_id, type=ADCMCoreType.CLUSTER) @@ -172,7 +178,7 @@ def _update_concerns( elif retrieve_issue(owner=cluster, cause=ConcernCause.HOSTCOMPONENT) is None: create_issue(owner=cluster, cause=ConcernCause.HOSTCOMPONENT) - redistribute_issues_and_flags(topology=new_topology) + added, removed = redistribute_issues_and_flags(topology=new_topology) lock = retrieve_lock_on_object(object_=cluster) if lock: @@ -195,6 +201,10 @@ def _update_concerns( lock_id=lock.id, ) + # since mechanism for locks redistribution is different from the one for flags/issues, + # there's no need in considering them in concern update events + return added, removed + def _update_policies(topology: ClusterTopology) -> None: service_content_type = ContentType.objects.get_for_model(model=ClusterObject) diff --git a/python/cm/status_api.py b/python/cm/status_api.py index 478219164b..61581acab2 100644 --- a/python/cm/status_api.py +++ b/python/cm/status_api.py @@ -15,8 +15,10 @@ from urllib.parse import urljoin import json -from core.types import ClusterID, CoreObjectDescriptor +from api_v2.concern.serializers import ConcernSerializer +from core.types import 
ADCMCoreType, ClusterID, ConcernID, CoreObjectDescriptor, ObjectID from django.conf import settings +from djangorestframework_camel_case.util import camelize from requests import Response from rest_framework.status import HTTP_200_OK, HTTP_201_CREATED import requests @@ -27,10 +29,12 @@ ADCMEntity, Cluster, ClusterObject, + ConcernItem, Host, HostComponent, ServiceComponent, ) +from cm.services.concern.distribution import AffectedObjectConcernMap, ConcernRelatedObjects class EventTypes: @@ -305,3 +309,57 @@ def make_ui_host_status(host: Host, host_components: Iterable[HostComponent]) -> "status": 32 if host_map is None else host_map.get("status", 0), "hc": comp_list, } + + +def notify_about_redistributed_concerns( + added: Iterable[tuple[ADCMCoreType, ObjectID, ConcernID]], + removed: Iterable[tuple[ADCMCoreType, ObjectID, ConcernID]], +) -> None: + added_concerns = tuple(added) + serialized_concerns = { + concern.id: camelize(data=ConcernSerializer(instance=concern).data) + for concern in ConcernItem.objects.filter(id__in=(id_ for _, _, id_ in added_concerns)).prefetch_related( + "owner" + ) + } + + for core_type, object_id, concern_id in removed: + post_event(event=f"delete_{core_type.value}_concern", object_id=object_id, changes={"id": concern_id}) + + for core_type, object_id, concern_id in added_concerns: + concern = serialized_concerns.get(concern_id) + if concern: + post_event(event=f"create_{core_type.value}_concern", object_id=object_id, changes=concern) + + +def notify_about_new_concern(concern_id: ConcernID, related_objects: ConcernRelatedObjects) -> None: + notify_about_redistributed_concerns( + added=( + (core_type, object_id, concern_id) + for core_type, object_ids in related_objects.items() + for object_id in object_ids + ), + removed=(), + ) + + +def notify_about_redistributed_concerns_from_maps( + added: AffectedObjectConcernMap, + removed: AffectedObjectConcernMap, +): + """ + Convenience function to call `notify_about_redistributed_concerns` 
based on input of `redistribute_issues_and_flags` + """ + return notify_about_redistributed_concerns( + added=_flatten_concerns_map(added), + removed=_flatten_concerns_map(removed), + ) + + +def _flatten_concerns_map(concerns_map: AffectedObjectConcernMap) -> Iterable[tuple[ADCMCoreType, ObjectID, ConcernID]]: + return ( + (core_type, object_id, concern_id) + for core_type, objects in concerns_map.items() + for object_id, concerns in objects.items() + for concern_id in concerns + ) diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index ccd0f0fd68..ab3ac576ae 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -11,7 +11,7 @@ # limitations under the License. from abc import ABC, abstractmethod -from collections import deque +from collections import defaultdict, deque from operator import itemgetter import functools @@ -65,7 +65,11 @@ recalculate_concerns_on_cluster_upgrade, ) from cm.services.concern.checks import object_configuration_has_issue -from cm.services.concern.distribution import distribute_concern_on_related_objects, redistribute_issues_and_flags +from cm.services.concern.distribution import ( + AffectedObjectConcernMap, + distribute_concern_on_related_objects, + redistribute_issues_and_flags, +) from cm.services.job.action import ActionRunPayload, run_action from cm.services.job.types import HcAclAction from cm.services.mapping import change_host_component_mapping, check_nothing @@ -500,7 +504,7 @@ def perform(self) -> None: self._target.refresh_from_db() self._upgrade_children(old_prototype=old_prototype, new_prototype=new_prototype) - self._update_concerns() + added, removed = self._update_concerns() for policy_object, content_type in self._get_objects_map_for_policy_update().items(): for policy in Policy.objects.filter( @@ -515,7 +519,7 @@ def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) ... 
@abstractmethod - def _update_concerns(self) -> None: + def _update_concerns(self) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: ... @abstractmethod @@ -581,9 +585,9 @@ def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) if not ServiceComponent.objects.filter(cluster=self._target, service=service).exists(): add_components_to_service(cluster=self._target, service=service) - def _update_concerns(self) -> None: + def _update_concerns(self) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: recalculate_concerns_on_cluster_upgrade(cluster=self._target) - redistribute_issues_and_flags(topology=retrieve_cluster_topology(self._target.id)) + return redistribute_issues_and_flags(topology=retrieve_cluster_topology(self._target.id)) def _get_objects_map_for_policy_update(self) -> dict[Cluster | ClusterObject | ServiceComponent, ContentType]: obj_type_map = {self._target: ContentType.objects.get_for_model(Cluster)} @@ -608,12 +612,16 @@ def _upgrade_children(self, old_prototype: Prototype, new_prototype: Prototype) for host in Host.objects.filter(provider=self._target, prototype__name=prototype.name): _switch_object(host, prototype) - def _update_concerns(self) -> None: + def _update_concerns(self) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: + added, removed = defaultdict(lambda: defaultdict(set)), {} target_cod = CoreObjectDescriptor(id=self._target.id, type=orm_object_to_core_type(self._target)) target_own_config_issue = retrieve_issue(owner=target_cod, cause=ConcernCause.CONFIG) if target_own_config_issue is None and object_configuration_has_issue(self._target): concern = create_issue(owner=target_cod, cause=ConcernCause.CONFIG) - distribute_concern_on_related_objects(owner=target_cod, concern_id=concern.id) + related_objects = distribute_concern_on_related_objects(owner=target_cod, concern_id=concern.id) + for core_type, object_ids in related_objects.items(): + for object_id in object_ids: + 
added[core_type][object_id].add(concern.id) clusters_for_redistribution: set[ClusterID] = set() m2m_model = Host.concerns.through @@ -636,13 +644,24 @@ def _update_concerns(self) -> None: ) clusters_for_redistribution.add(host.cluster_id) host_own_concerns_to_link.append(m2m_model(host_id=host.id, concernitem_id=concern.id)) + added[ADCMCoreType.HOST][host.id].add(concern.id) m2m_model.objects.bulk_create(objs=host_own_concerns_to_link) clusters_for_redistribution -= {None} if clusters_for_redistribution: for topology in retrieve_multiple_clusters_topology(cluster_ids=clusters_for_redistribution): - redistribute_issues_and_flags(topology=topology) + added_, removed_ = redistribute_issues_and_flags(topology=topology) + + for core_type, added_entries in added_.items(): + for object_id, concern_ids in added_entries.items(): + added[core_type][object_id].update(concern_ids) + + for core_type, removed_entries in removed_.items(): + for object_id, concern_ids in removed_entries.items(): + removed[core_type][object_id].update(concern_ids) + + return added, removed def _get_objects_map_for_policy_update(self) -> dict[HostProvider | Host, ContentType]: obj_type_map = {self._target: ContentType.objects.get_for_model(HostProvider)} From 5f2b0dc7e9ee50e14022a607f1a0436c586a083e Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 2 Oct 2024 07:39:38 +0000 Subject: [PATCH 81/98] ADCM-5995 Use `shutil.move` to avoid cross-device link error during archive copying --- python/cm/management/commands/collect_statistics.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/python/cm/management/commands/collect_statistics.py b/python/cm/management/commands/collect_statistics.py index cb6ea71fd7..10627d8f93 100644 --- a/python/cm/management/commands/collect_statistics.py +++ b/python/cm/management/commands/collect_statistics.py @@ -14,6 +14,7 @@ from typing import NamedTuple from urllib.parse import urlunparse import os +import shutil import socket from 
audit.alt.background import audit_background_operation @@ -163,9 +164,12 @@ def handle(self, *_, mode: str, **__): logger.debug(msg="Statistics collector: archive encoding") encoder = TarFileEncoder(suffix=".enc") encoded_file = encoder.encode(path_file=archive) - encoded_file = encoded_file.replace(STATISTIC_DIR / encoded_file.name) + # We use shutil here instead of Path.rename, + # because of possible cross-device link problem (e.g. -v /adcm/data): + # `OSError: [Errno 18] Cross-device link:` + encoded_file = shutil.move(str(encoded_file), str(STATISTIC_DIR / encoded_file.name)) - self.stdout.write(f"Data saved in: {str(encoded_file.absolute())}") + self.stdout.write(f"Data saved in: {encoded_file}") case _: pass From 0776eb2121ff3873523f16a8e03b2335a76be0f9 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 2 Oct 2024 13:43:54 +0000 Subject: [PATCH 82/98] ADCM-6003 Re-implement `loadcluster` --- python/api_v2/host/utils.py | 14 +- python/api_v2/host/views.py | 11 +- python/cm/services/transition/__init__.py | 11 + python/cm/services/transition/load.py | 255 ++++++++++++++++++++++ python/cm/services/transition/types.py | 108 +++++++++ 5 files changed, 390 insertions(+), 9 deletions(-) create mode 100644 python/cm/services/transition/__init__.py create mode 100644 python/cm/services/transition/load.py create mode 100644 python/cm/services/transition/types.py diff --git a/python/api_v2/host/utils.py b/python/api_v2/host/utils.py index 9da2d89f3c..f99c0fec36 100644 --- a/python/api_v2/host/utils.py +++ b/python/api_v2/host/utils.py @@ -19,12 +19,12 @@ recheck_issues, ) from cm.logger import logger -from cm.models import Cluster, Host, HostProvider, ObjectType, Prototype +from cm.models import Cluster, Host, ObjectType, Prototype from cm.services.concern import retrieve_issue from cm.services.concern.locks import get_lock_on_object from cm.services.maintenance_mode import get_maintenance_mode_response from cm.services.status.notify import reset_hc_map -from 
core.types import ADCMCoreType, CoreObjectDescriptor +from core.types import ADCMCoreType, BundleID, CoreObjectDescriptor, HostProviderID from rbac.models import re_apply_object_policy from rest_framework.request import Request from rest_framework.response import Response @@ -33,11 +33,15 @@ from api_v2.host.serializers import HostChangeMaintenanceModeSerializer -def create_host(provider: HostProvider, fqdn: str, cluster: Cluster | None) -> Host: - host_prototype = Prototype.objects.get(type=ObjectType.HOST, bundle=provider.prototype.bundle) +def create_host(bundle_id: BundleID, provider_id: HostProviderID, fqdn: str, cluster: Cluster | None) -> Host: + host_prototype = Prototype.objects.get(type=ObjectType.HOST, bundle_id=bundle_id) check_license(prototype=host_prototype) - return Host.objects.create(prototype=host_prototype, provider=provider, fqdn=fqdn, cluster=cluster) + host = Host.objects.create(prototype=host_prototype, provider_id=provider_id, fqdn=fqdn, cluster=cluster) + + process_config_issues_policies_hc(host) + + return host def _recheck_new_host_issues(host: Host): diff --git a/python/api_v2/host/views.py b/python/api_v2/host/views.py index 74f290d1fe..f11e409657 100644 --- a/python/api_v2/host/views.py +++ b/python/api_v2/host/views.py @@ -23,7 +23,7 @@ from audit.alt.hooks import extract_current_from_response, extract_previous_from_object, only_on_success from cm.api import delete_host from cm.errors import AdcmEx -from cm.models import Cluster, ConcernType, Host, HostProvider +from cm.models import Cluster, ConcernType, Host, HostProvider, Prototype from django.db.transaction import atomic from django_filters.rest_framework.backends import DjangoFilterBackend from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view @@ -60,7 +60,7 @@ HostSerializer, HostUpdateSerializer, ) -from api_v2.host.utils import create_host, maintenance_mode, process_config_issues_policies_hc +from api_v2.host.utils import create_host, 
maintenance_mode from api_v2.utils.audit import host_from_lookup, host_from_response, parent_host_from_lookup, update_host_name from api_v2.views import ADCMGenericViewSet, ObjectWithStatusViewMixin @@ -217,10 +217,13 @@ def create(self, request, *args, **kwargs): # noqa: ARG002 ) with atomic(): + bundle_id = Prototype.objects.values_list("bundle_id", flat=True).get(id=request_hostprovider.prototype_id) host = create_host( - provider=request_hostprovider, fqdn=serializer.validated_data["fqdn"], cluster=request_cluster + bundle_id=bundle_id, + provider_id=request_hostprovider.id, + fqdn=serializer.validated_data["fqdn"], + cluster=request_cluster, ) - process_config_issues_policies_hc(host=host) return Response( data=HostSerializer(instance=host, context=self.get_serializer_context()).data, status=HTTP_201_CREATED diff --git a/python/cm/services/transition/__init__.py b/python/cm/services/transition/__init__.py new file mode 100644 index 0000000000..824dd6c8fe --- /dev/null +++ b/python/cm/services/transition/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/python/cm/services/transition/load.py b/python/cm/services/transition/load.py new file mode 100644 index 0000000000..3c0828aa10 --- /dev/null +++ b/python/cm/services/transition/load.py @@ -0,0 +1,255 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from typing import Callable, Iterable, TypeAlias + +from api_v2.host.utils import create_host +from api_v2.service.utils import bulk_add_services_to_cluster +from core.types import ( + BundleID, + ClusterID, + ComponentName, + HostID, + HostName, + HostProviderID, + HostProviderName, + ServiceName, +) +from django.contrib.contenttypes.models import ContentType +from django.db.models import F + +from cm.api import add_cluster, add_host_provider, update_obj_config +from cm.models import ( + Bundle, + Cluster, + ClusterObject, + GroupConfig, + Host, + HostComponent, + HostProvider, + MaintenanceMode, + ObjectType, + Prototype, + ServiceComponent, +) +from cm.services.cluster import perform_host_to_cluster_map +from cm.services.status import notify +from cm.services.transition.types import ( + BundleHash, + ClusterInfo, + ConfigHostGroupInfo, + HostInfo, + HostProviderInfo, + RestorableCondition, + TransitionPayload, +) + +BundleHashIDMap: TypeAlias = dict[BundleHash, BundleID] +HostProviderNameIDsMap: TypeAlias = dict[HostProviderName, tuple[HostProviderID, BundleID]] +HostNameIDMap: TypeAlias = dict[HostName, HostID] + + +def load(data: TransitionPayload, report: Callable[[str], None] = print) -> None: + report("Load started...") + + report("Bundles discovery") + bundles = discover_bundles(data.bundles) + if len(bundles) != len(data.bundles): + missing_bundles = [ + str(data.bundles[missing_bundle_hash]) for missing_bundle_hash in set(data.bundles).difference(bundles) + ] + report(f"Not all bundles are 
installed.\nMissing:\n{'\n'.join(missing_bundles)}") + message = "Bundles are missing in this ADCM" + raise RuntimeError(message) + + report("Host Providers discovery/creation") + hostproviders = discover_hostproviders(hostproviders={entry.name: entry.bundle for entry in data.hostproviders}) + if hostproviders: + report(f"Some Host Providers exist, they will be used to create hosts from them: {', '.join(hostproviders)}") + + if len(hostproviders) != len(data.hostproviders): + report(f"Host Provider will be created: {', '.join(hp.name for hp in data.hostproviders)}") + + hostproviders |= create_new_hostproviders( + hostproviders=(entry for entry in data.hostproviders if entry.name not in hostproviders), bundles=bundles + ) + + report("Hosts creation") + hosts = create_new_hosts(hosts=data.hosts, hostproviders=hostproviders) + + report("Cluster creation") + create_cluster(cluster=data.cluster, bundles=bundles, hosts=hosts) + + +def discover_bundles(required_bundles: BundleHash) -> BundleHashIDMap: + return dict(Bundle.objects.values_list("hash", "id").filter(hash__in=required_bundles)) + + +def discover_hostproviders(hostproviders: dict[HostProviderName, BundleHash]) -> HostProviderNameIDsMap: + result = {} + + for id_, name, bundle_id, bundle_hash in HostProvider.objects.values_list( + "id", "name", "bundle_id", "bundle__hash" + ).filter(name__in=hostproviders): + if bundle_hash == hostproviders[name]: + result[name] = (id_, bundle_id) + + return result + + +def create_new_hostproviders( + hostproviders: Iterable[HostProviderInfo], bundles: BundleHashIDMap +) -> HostProviderNameIDsMap: + provider_protos: dict[BundleHash, Prototype] = {} + bundle_id_hash: dict[BundleID, BundleHash] = {v: k for k, v in bundles.items()} + + for prototype in Prototype.objects.filter(bundle_id__in=bundles.values(), type=ObjectType.PROVIDER): + provider_protos[bundle_id_hash[prototype.bundle_id]] = prototype + + result = {} + + for provider_info in hostproviders: + bundle_id = 
bundles[provider_info.bundle]
+        new_provider = add_host_provider(
+            prototype=provider_protos[bundle_id], name=provider_info.name, description=provider_info.description
+        )
+        result[provider_info.name] = (new_provider.id, bundle_id)
+        _restore_state(target=new_provider, condition=provider_info.state)
+
+    return result
+
+
+def create_new_hosts(hosts: Iterable[HostInfo], hostproviders: HostProviderNameIDsMap) -> HostNameIDMap:
+    result = {}
+
+    hosts_in_mm = deque()
+
+    for host_info in hosts:
+        provider_id, bundle_id = hostproviders[host_info.hostprovider]
+        host = create_host(bundle_id=bundle_id, provider_id=provider_id, fqdn=host_info.name, cluster=None)
+        result[host_info.name] = host.id
+        _restore_state(target=host, condition=host_info.state)
+        if host_info.maintenance_mode == "on":
+            hosts_in_mm.append(host.id)
+
+    if hosts_in_mm:
+        Host.objects.filter(id__in=hosts_in_mm).update(maintenance_mode=MaintenanceMode.ON)
+
+    return result
+
+
+def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNameIDMap) -> ClusterID:
+    bundle_id = bundles[cluster.bundle]
+    cluster_prototype = Prototype.objects.get(bundle_id=bundle_id, type=ObjectType.CLUSTER)
+
+    cluster_object = add_cluster(prototype=cluster_prototype, name=cluster.name, description=cluster.description)
+    services_to_add = Prototype.objects.filter(
+        bundle_id=bundle_id, type=ObjectType.SERVICE, name__in=(service.name for service in cluster.services)
+    )
+    bulk_add_services_to_cluster(cluster=cluster_object, prototypes=services_to_add)
+    perform_host_to_cluster_map(cluster_id=cluster_object.id, hosts=hosts.values(), status_service=notify)
+
+    _restore_state(target=cluster_object, condition=cluster.state)
+
+    config_host_groups: deque[tuple[Cluster | ClusterObject | ServiceComponent, ConfigHostGroupInfo]] = deque(
+        (cluster_object, group) for group in cluster.host_groups
+    )
+
+    orm_objects: dict[ServiceName, tuple[ClusterObject, dict[ComponentName, ServiceComponent]]] = {}
+
+    for component in (
+        ServiceComponent.objects.filter(cluster_id=cluster_object.id)
+        .select_related("service")
+        .annotate(own_name=F("prototype__name"), parent_name=F("prototype__parent__name"))
+    ):
+        if component.parent_name in orm_objects:
+            orm_objects[component.parent_name][1][component.own_name] = component
+        else:
+            orm_objects[component.parent_name] = (component.service, {component.own_name: component})
+
+    services_in_mm = deque()
+    components_in_mm = deque()
+
+    for service_info in cluster.services:
+        service_object, component_object_mapping = orm_objects[service_info.name]
+        _restore_state(target=service_object, condition=service_info.state)
+        config_host_groups.extend((service_object, group) for group in service_info.host_groups)
+        if service_info.maintenance_mode == "on":
+            services_in_mm.append(service_object.id)
+
+        for component_info in service_info.components:
+            component_object = component_object_mapping[component_info.name]
+            _restore_state(target=component_object, condition=component_info.state)
+            config_host_groups.extend((component_object, group) for group in component_info.host_groups)
+            if component_info.maintenance_mode == "on":
+                components_in_mm.append(component_object.id)
+
+    if services_in_mm:
+        ClusterObject.objects.filter(id__in=services_in_mm).update(_maintenance_mode=MaintenanceMode.ON)
+
+    if components_in_mm:
+        ServiceComponent.objects.filter(id__in=components_in_mm).update(_maintenance_mode=MaintenanceMode.ON)
+
+    if cluster.mapping:
+        entries = deque()
+
+        for hc_entry in cluster.mapping:
+            service_object, component_object_mapping = orm_objects[hc_entry.service]
+            component_object = component_object_mapping[hc_entry.component]
+            entries.append(
+                HostComponent(
+                    cluster_id=cluster_object.id,
+                    service_id=service_object.id,
+                    component_id=component_object.id,
+                    host_id=hosts[hc_entry.host],
+                )
+            )
+
+        HostComponent.objects.bulk_create(objs=entries)
+
+    for owner, group in config_host_groups:
+        _create_group_config(owner=owner, group=group, hosts=hosts)
+    return cluster_object.id
+
+
+def _restore_state(
+    target: HostProvider | Host | Cluster | ClusterObject | ServiceComponent, condition: RestorableCondition
+) -> None:
+    if condition.config:
+        update_obj_config(
+            obj_conf=target.config, config=condition.config, attr=condition.attr, description="Restored configuration"
+        )
+
+    target.set_state(condition.state)
+    target.set_multi_state(condition.multi_state)
+
+
+def _create_group_config(
+    owner: Cluster | ClusterObject | ServiceComponent, group: ConfigHostGroupInfo, hosts: HostNameIDMap
+) -> None:
+    # there's no business rule for that, but probably should be
+    host_group = GroupConfig.objects.create(
+        object_type=ContentType.objects.get_for_model(model=owner),
+        object_id=owner.pk,
+        name=group.name,
+        description=group.description,
+    )
+
+    if group.hosts:
+        m2m = GroupConfig.hosts.through
+        m2m.objects.bulk_create(objs=(m2m(groupconfig_id=host_group.id, host_id=hosts[host]) for host in group.hosts))
+
+    # groups without configs shouldn't be created (according to API v2 rules)
+    update_obj_config(
+        obj_conf=host_group.config, config=group.config, attr=group.attr, description="Restored configuration"
+    )
diff --git a/python/cm/services/transition/types.py b/python/cm/services/transition/types.py
new file mode 100644
index 0000000000..596439fc2e
--- /dev/null
+++ b/python/cm/services/transition/types.py
@@ -0,0 +1,108 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +from dataclasses import dataclass +from typing import Any, Literal, TypeAlias + +from core.types import ComponentName, HostName, HostProviderName, ServiceName +from pydantic import BaseModel + +BundleHash: TypeAlias = str +ConfigurationDict: TypeAlias = dict[str, Any] +# we can't handle anything else during restoration +LiteralMM: TypeAlias = Literal["on", "off"] + + +@dataclass(slots=True) +class BundleExtraInfo: + name: str + version: str + edition: str + + def __str__(self) -> str: + return f"{self.name} | ver {self.version} ({self.edition})" + + +@dataclass(slots=True) +class RestorableCondition: + state: str + multi_state: list[str] + config: ConfigurationDict | None + attr: ConfigurationDict | None + + +@dataclass(slots=True) +class HostProviderInfo: + bundle: BundleHash + name: HostProviderName + description: str + state: RestorableCondition + + +@dataclass(slots=True) +class HostInfo: + hostprovider: HostProviderName + name: HostName + state: RestorableCondition + maintenance_mode: LiteralMM + + +@dataclass(slots=True) +class ConfigHostGroupInfo: + name: str + description: str + config: ConfigurationDict + attr: ConfigurationDict + hosts: list[HostName] + + +@dataclass(slots=True) +class ComponentInfo: + name: ComponentName + state: RestorableCondition + maintenance_mode: LiteralMM + host_groups: list[ConfigHostGroupInfo] + + +@dataclass(slots=True) +class ServiceInfo: + name: ServiceName + state: RestorableCondition + components: list[ComponentInfo] + maintenance_mode: LiteralMM + host_groups: list[ConfigHostGroupInfo] + + +@dataclass(slots=True) +class NamedMappingEntry: + host: HostName + service: ServiceName + component: ComponentName + + +@dataclass(slots=True) +class ClusterInfo: + bundle: BundleHash + name: str + description: str + state: RestorableCondition + services: list[ServiceInfo] + mapping: list[NamedMappingEntry] + host_groups: list[ConfigHostGroupInfo] + + +class TransitionPayload(BaseModel): + adcm_version: str + bundles: 
dict[BundleHash, BundleExtraInfo] + cluster: ClusterInfo + hostproviders: list[HostProviderInfo] + hosts: list[HostInfo] From baf30f1d9da67c40a85952c7df8a28fbf0145fc6 Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Wed, 2 Oct 2024 16:55:24 +0300 Subject: [PATCH 83/98] ADCM-6000: Fix deleting bundle --- python/api_v2/tests/test_bundle.py | 5 ++++- python/cm/bundle.py | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/python/api_v2/tests/test_bundle.py b/python/api_v2/tests/test_bundle.py index 53edb9791b..1138063d39 100644 --- a/python/api_v2/tests/test_bundle.py +++ b/python/api_v2/tests/test_bundle.py @@ -11,6 +11,7 @@ # limitations under the License. +from cm.bundle import _get_file_hashes from cm.models import Action, Bundle from django.conf import settings from rest_framework.status import ( @@ -90,10 +91,12 @@ def test_retrieve_not_found_fail(self): self.assertEqual(response.status_code, HTTP_404_NOT_FOUND) def test_delete_success(self): + bundle_hash = self.bundle_1.hash response = self.client.v2[self.bundle_1].delete() - self.assertEqual(Bundle.objects.filter(pk=self.bundle_1.pk).exists(), False) self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) + self.assertEqual(Bundle.objects.filter(pk=self.bundle_1.pk).exists(), False) + self.assertIsNone(_get_file_hashes(path=self.directories["DOWNLOAD_DIR"]).get(bundle_hash)) def test_delete_not_found_fail(self): response = (self.client.v2 / "bundles" / self.get_non_existent_pk(model=Bundle)).delete() diff --git a/python/cm/bundle.py b/python/cm/bundle.py index 08b15e1e5e..0382b38625 100644 --- a/python/cm/bundle.py +++ b/python/cm/bundle.py @@ -1331,6 +1331,7 @@ def delete_bundle(bundle): bundle.version, ) + bundle_hash = bundle.hash bundle.delete() for role in Role.objects.filter(class_name="ParentRole"): @@ -1339,6 +1340,9 @@ def delete_bundle(bundle): ProductCategory.re_collect() + if bundle_archive := _get_file_hashes(path=settings.DOWNLOAD_DIR).get(bundle_hash): + 
bundle_archive.unlink() + def check_services(): prototype_data = {} From b16a74b6ba4a464bc2d778407987852524bda632 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 4 Oct 2024 04:58:33 +0000 Subject: [PATCH 84/98] ADCM-6004 Re-implement `dumpcluster` --- python/cm/services/config/secrets.py | 81 ++++++++ python/cm/services/transition/dump.py | 268 +++++++++++++++++++++++++ python/cm/services/transition/load.py | 12 +- python/cm/services/transition/types.py | 34 ++-- 4 files changed, 372 insertions(+), 23 deletions(-) create mode 100644 python/cm/services/config/secrets.py create mode 100644 python/cm/services/transition/dump.py diff --git a/python/cm/services/config/secrets.py b/python/cm/services/config/secrets.py new file mode 100644 index 0000000000..e5a1e57d5a --- /dev/null +++ b/python/cm/services/config/secrets.py @@ -0,0 +1,81 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ansible.parsing.vault import VaultAES256, VaultSecret + + +class AnsibleSecrets: + def __init__(self) -> None: + # Import it locally for laziness support. + # There's no major need in django initialization for this init: + # 1. Secret may be read independently + # 2. 
Ansible secret header is constant, not an actual setting + from django.conf import settings + + secret = settings.ANSIBLE_SECRET + if not secret: + message = "Ansible secret is undefined, work with secrets is impossible" + raise ValueError(message) + + self._vault = VaultAES256() + self._secret = VaultSecret(_bytes=str(secret).encode("utf-8")) + self._encrypted_header = settings.ANSIBLE_VAULT_HEADER + + def reveal_secrets(self, source: dict) -> dict: + """ + Recursively reveal ansible secrets from given source + and return all values as new dictionary. + + Note: "nested" secrets are revealed only from `dict` and `list`, + types like `tuple` and `deque` aren't currently supported. + """ + + result = {} + + for key, value in source.items(): + if not isinstance(value, dict): + if isinstance(value, list): + result[key] = [ + entry if not isinstance(entry, dict) else self.reveal_secrets(entry) for entry in value + ] + else: + result[key] = value + + continue + + if "__ansible_vault" in value: + result[key] = self.decrypt(value["__ansible_vault"]) + else: + result[key] = self.reveal_secrets(value) + + return result + + def decrypt(self, value: str) -> str | None: + """ + Decrypt string value if it's ansible encypted, otherwise return value itself. + + Avoid using this method directly, unless you know what you're doing: + `reveal_secrets` is prefferred. 
+ """ + + if self._encrypted_header not in value: + return value + + _, ciphertext = value.split("\n", maxsplit=1) + + decrypted = self._vault.decrypt(b_vaulttext=ciphertext, secret=self._secret) + + if decrypted is None: + # for some cases Ansible decryption may return `None` as a valid value + return decrypted + + return decrypted.decode("utf-8") diff --git a/python/cm/services/transition/dump.py b/python/cm/services/transition/dump.py new file mode 100644 index 0000000000..5fab1c8638 --- /dev/null +++ b/python/cm/services/transition/dump.py @@ -0,0 +1,268 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TypeAlias + +from core.types import ( + BundleID, + ClusterID, + ComponentID, + ComponentNameKey, + ConfigID, + HostID, + HostName, + HostProviderID, + ObjectID, + ServiceID, + ServiceNameKey, +) +from django.conf import settings +from django.contrib.contenttypes.models import ContentType +from django.db.models import F, Q + +from cm.models import ( + Bundle, + Cluster, + ClusterObject, + ConfigLog, + GroupConfig, + Host, + HostComponent, + HostInfo, + HostProvider, + MaintenanceMode, + ServiceComponent, +) +from cm.services.config.secrets import AnsibleSecrets +from cm.services.transition.types import ( + BundleExtraInfo, + BundleHash, + ClusterInfo, + ComponentInfo, + ConfigHostGroupInfo, + HostProviderInfo, + NamedMappingEntry, + RestorableCondition, + ServiceInfo, + TransitionPayload, +) + +# store to this dict conditions that should be updated based on config +ConfigUpdateAcc: TypeAlias = dict[ConfigID, RestorableCondition | ConfigHostGroupInfo] + + +def dump(cluster_id: ClusterID) -> TransitionPayload: + configs_to_set: ConfigUpdateAcc = {} + + hosts, hostprovider_ids = retrieve_hosts(cluster_id=cluster_id, config_acc=configs_to_set) + hostproviders, bundles = retrieve_hostproviders(hostproviders=hostprovider_ids, config_acc=configs_to_set) + cluster, cluster_bundle_id = retrieve_cluster( + cluster_id=cluster_id, hosts={host_id: host.name for host_id, host in hosts.items()}, config_acc=configs_to_set + ) + bundles.add(cluster_bundle_id) + bundles_info = retrieve_bundles_info(ids=bundles) + + fill_configurations(config_acc=configs_to_set) + + return TransitionPayload( + adcm_version=settings.ADCM_VERSION, + bundles=bundles_info, + hostproviders=hostproviders, + hosts=list(hosts.values()), + cluster=cluster, + ) + + +def retrieve_hosts( + cluster_id: ClusterID, config_acc: ConfigUpdateAcc +) -> tuple[dict[HostID, HostInfo], set[HostProviderID]]: + hostproviders = set() + + hosts: dict[HostID, HostInfo] = {} + + for entry in 
Host.objects.filter(cluster_id=cluster_id).annotate( + current_config_id=F("config__current"), provider_name=F("provider__name") + ): + if entry.maintenance_mode not in (MaintenanceMode.ON, MaintenanceMode.OFF): + message = f"Host {entry.fqdn} has unserializable Maintenance Mode state: {entry.maintenance_mode}" + raise ValueError(message) + + hostproviders.add(entry.provider_id) + + current_condition = RestorableCondition(state=entry.state, multi_state=entry.multi_state) + + if entry.current_config_id: + config_acc[entry.current_config_id] = current_condition + + hosts[entry.id] = HostInfo( + name=entry.fqdn, + hostprovider=entry.provider_name, + condition=current_condition, + maintenance_mode=str(entry.maintenance_mode).lower(), + ) + + return hosts, hostproviders + + +def retrieve_hostproviders( + hostproviders: set[HostProviderID], config_acc: ConfigUpdateAcc +) -> tuple[list[HostProviderInfo], set[BundleID]]: + bundles = set() + + result = [] + + for entry in HostProvider.objects.filter(id__in=hostproviders).annotate( + current_config_id=F("config__current"), + bundle_id_value=F("prototype__bundle_id"), + bundle_hash=F("prototype__bundle__hash"), + ): + bundles.add(entry.bundle_id_value) + + current_condition = RestorableCondition(state=entry.state, multi_state=entry.multi_state) + + if entry.current_config_id: + config_acc[entry.current_config_id] = current_condition + + result.append( + HostProviderInfo( + bundle=entry.bundle_hash, name=entry.name, description=entry.description, condition=current_condition + ) + ) + + return result, bundles + + +def retrieve_cluster( + cluster_id: ClusterID, hosts: dict[HostID, HostName], config_acc: ConfigUpdateAcc +) -> tuple[ClusterInfo, BundleID]: + cluster = Cluster.objects.annotate( + current_config_id=F("config__current"), + bundle_id_value=F("prototype__bundle_id"), + bundle_hash=F("prototype__bundle__hash"), + ).get(id=cluster_id) + + current_condition = RestorableCondition(state=cluster.state, 
multi_state=cluster.multi_state) + + if cluster.current_config_id: + config_acc[cluster.current_config_id] = current_condition + + cluster_info = ClusterInfo( + bundle=cluster.bundle_hash, name=cluster.name, description=cluster.description, condition=current_condition + ) + + service_id_name_map: dict[ServiceID, ServiceNameKey] = {} + component_id_name_map: dict[ComponentID, ComponentNameKey] = {} + + for service in ClusterObject.objects.filter(cluster_id=cluster_id).annotate( + current_config_id=F("config__current"), service_name=F("prototype__name") + ): + name = ServiceNameKey(service=service.service_name) + mm = service.maintenance_mode_attr + if mm not in (MaintenanceMode.ON, MaintenanceMode.OFF): + message = f"{str(name).capitalize()} has unserializable Maintenance Mode state: {mm}" + raise ValueError(message) + + service_id_name_map[service.id] = name + + condition = RestorableCondition(state=service.state, multi_state=service.multi_state) + + if service.current_config_id: + config_acc[service.current_config_id] = condition + + cluster_info.services[name.service] = ServiceInfo( + name=name.service, condition=condition, maintenance_mode=str(mm).lower() + ) + + for component in ServiceComponent.objects.filter(cluster_id=cluster_id).annotate( + current_config_id=F("config__current"), + component_name=F("prototype__name"), + service_name=F("prototype__parent__name"), + ): + name = ComponentNameKey(service=component.service_name, component=component.component_name) + mm = component.maintenance_mode_attr + if mm not in (MaintenanceMode.ON, MaintenanceMode.OFF): + message = f"{str(name).capitalize()} has unserializable Maintenance Mode state: {mm}" + raise ValueError(message) + + component_id_name_map[component.id] = name + + condition = RestorableCondition(state=component.state, multi_state=component.multi_state) + + if component.current_config_id: + config_acc[component.current_config_id] = condition + + 
cluster_info.services[name.service].components[name.component] = ComponentInfo( + name=name.component, condition=condition, maintenance_mode=str(mm).lower() + ) + + for host_id, component_id in HostComponent.objects.values_list("host_id", "component_id").filter( + cluster_id=cluster_id + ): + key = component_id_name_map[component_id] + cluster_info.mapping.append( + NamedMappingEntry(host=hosts[host_id], service=key.service, component=key.component) + ) + + cluster_ct = ContentType.objects.get_for_model(Cluster) + service_ct = ContentType.objects.get_for_model(ClusterObject) + component_ct = ContentType.objects.get_for_model(ServiceComponent) + + host_groups: dict[ObjectID, ConfigHostGroupInfo] = {} + + for group in ( + GroupConfig.objects.filter( + Q(object_type=cluster_ct, object_id=cluster_id) + | Q(object_type=service_ct, object_id__in=service_id_name_map) + | Q(object_type=component_ct, object_id__in=component_id_name_map) + ) + .annotate(current_config_id=F("config__current_id")) + .select_related("object_type") + ): + group_info = ConfigHostGroupInfo(name=group.name, description=group.description) + + host_groups[group.id] = group_info + config_acc[group.current_config_id] = group_info + + if group.object_type == cluster_ct: + cluster_info.host_groups.append(group_info) + elif group.object_type == service_ct: + cluster_info.services[service_id_name_map[group.object_id].service].host_groups.append(group_info) + else: + key = component_id_name_map[group.object_id] + cluster_info.services[key.service].components[key.component].host_groups.append(group_info) + + for group_id, host_id in GroupConfig.hosts.through.objects.filter(groupconfig_id__in=host_groups).values_list( + "groupconfig_id", "host_id" + ): + host_groups[group_id].hosts.append(hosts[host_id]) + + return cluster_info, cluster.bundle_id_value + + +def retrieve_bundles_info(ids: set[BundleID]) -> dict[BundleHash, BundleExtraInfo]: + return { + hash_: BundleExtraInfo(name=name, version=version, 
edition=edition) + for name, version, edition, hash_ in Bundle.objects.filter(id__in=ids).values_list( + "name", "version", "edition", "hash" + ) + } + + +def fill_configurations(config_acc: ConfigUpdateAcc) -> None: + secrets = AnsibleSecrets() + + for config_id, config, attr in ( + ConfigLog.objects.filter(id__in=config_acc).values_list("id", "config", "attr").iterator(chunk_size=20) + ): + target = config_acc[config_id] + target.config = secrets.reveal_secrets(config) + target.attr = attr diff --git a/python/cm/services/transition/load.py b/python/cm/services/transition/load.py index 3c0828aa10..f99d05609d 100644 --- a/python/cm/services/transition/load.py +++ b/python/cm/services/transition/load.py @@ -138,7 +138,7 @@ def create_new_hosts(hosts: Iterable[HostInfo], hostproviders: HostProviderNameI provider_id, bundle_id = hostproviders[host_info.hostprovider] host = create_host(bundle_id=bundle_id, provider_id=provider_id, fqdn=host_info.name, cluster=None) result[host_info.name] = host.id - _restore_state(target=host, condition=host_info.state) + _restore_state(target=host, condition=host_info.condition) if host_info.maintenance_mode == "on": hosts_in_mm.append(host.id) @@ -159,7 +159,7 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa bulk_add_services_to_cluster(cluster=cluster_object, prototypes=services_to_add) perform_host_to_cluster_map(cluster_id=cluster.pk, hosts=hosts.values(), status_service=notify) - _restore_state(target=cluster, condition=cluster.state) + _restore_state(target=cluster, condition=cluster.condition) config_host_groups: deque[tuple[Cluster | ClusterObject | ServiceComponent, ConfigHostGroupInfo]] = deque( (cluster_object, group) for group in cluster.host_groups @@ -180,16 +180,16 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa services_in_mm = deque() components_in_mm = deque() - for service_info in cluster.services: + for service_info in cluster.services.values(): 
service_object, component_object_mapping = orm_objects[service_info.name] - _restore_state(target=service_object, condition=service_info.state) + _restore_state(target=service_object, condition=service_info.condition) config_host_groups.extend((service_object, group) for group in service_info.host_groups) if service_info.maintenance_mode == "on": services_in_mm.append(service_object.id) - for component_info in service_info.components: + for component_info in service_info.components.values(): component_object = component_object_mapping[component.name] - _restore_state(target=component_object, condition=component_info.state) + _restore_state(target=component_object, condition=component_info.condition) config_host_groups.extend((component_object, group) for group in component_info.host_groups) if component_info.maintenance_mode == "on": components_in_mm.append(component_object.id) diff --git a/python/cm/services/transition/types.py b/python/cm/services/transition/types.py index 596439fc2e..b7178f718e 100644 --- a/python/cm/services/transition/types.py +++ b/python/cm/services/transition/types.py @@ -10,7 +10,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Any, Literal, TypeAlias from core.types import ComponentName, HostName, HostProviderName, ServiceName @@ -36,8 +36,8 @@ def __str__(self) -> str: class RestorableCondition: state: str multi_state: list[str] - config: ConfigurationDict | None - attr: ConfigurationDict | None + config: ConfigurationDict | None = None + attr: ConfigurationDict | None = None @dataclass(slots=True) @@ -45,14 +45,14 @@ class HostProviderInfo: bundle: BundleHash name: HostProviderName description: str - state: RestorableCondition + condition: RestorableCondition @dataclass(slots=True) class HostInfo: hostprovider: HostProviderName name: HostName - state: RestorableCondition + condition: RestorableCondition maintenance_mode: LiteralMM @@ -60,26 +60,26 @@ class HostInfo: class ConfigHostGroupInfo: name: str description: str - config: ConfigurationDict - attr: ConfigurationDict - hosts: list[HostName] + config: ConfigurationDict = field(default_factory=dict) + attr: ConfigurationDict = field(default_factory=dict) + hosts: list[HostName] = field(default_factory=list) @dataclass(slots=True) class ComponentInfo: name: ComponentName - state: RestorableCondition + condition: RestorableCondition maintenance_mode: LiteralMM - host_groups: list[ConfigHostGroupInfo] + host_groups: list[ConfigHostGroupInfo] = field(default_factory=list) @dataclass(slots=True) class ServiceInfo: name: ServiceName - state: RestorableCondition - components: list[ComponentInfo] + condition: RestorableCondition maintenance_mode: LiteralMM - host_groups: list[ConfigHostGroupInfo] + components: dict[ComponentName, ComponentInfo] = field(default_factory=dict) + host_groups: list[ConfigHostGroupInfo] = field(default_factory=list) @dataclass(slots=True) @@ -94,10 +94,10 @@ class ClusterInfo: bundle: BundleHash name: str description: str - state: RestorableCondition - services: list[ServiceInfo] - mapping: list[NamedMappingEntry] - 
host_groups: list[ConfigHostGroupInfo] + condition: RestorableCondition + services: dict[ServiceName, ServiceInfo] = field(default_factory=dict) + mapping: list[NamedMappingEntry] = field(default_factory=list) + host_groups: list[ConfigHostGroupInfo] = field(default_factory=list) class TransitionPayload(BaseModel): From 041c5f10c180ae4c43ce8677c4f77faac9959536 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 4 Oct 2024 07:32:59 +0000 Subject: [PATCH 85/98] ADCM-6003 & ADCM-6004 Apply reworked cluster dump/load logic to commands --- python/cm/management/commands/dumpcluster.py | 375 +-------------- python/cm/management/commands/loadcluster.py | 459 ++----------------- python/cm/services/transition/dump.py | 8 +- python/cm/services/transition/load.py | 44 +- python/core/types.py | 2 + 5 files changed, 86 insertions(+), 802 deletions(-) diff --git a/python/cm/management/commands/dumpcluster.py b/python/cm/management/commands/dumpcluster.py index 8c428f613b..cc5eba0715 100644 --- a/python/cm/management/commands/dumpcluster.py +++ b/python/cm/management/commands/dumpcluster.py @@ -12,7 +12,6 @@ from pathlib import Path import sys -import json import base64 import getpass @@ -23,278 +22,11 @@ from django.conf import settings from django.core.management.base import BaseCommand -from cm.models import ( - Bundle, - Cluster, - ClusterObject, - ConfigLog, - GroupConfig, - Host, - HostComponent, - HostProvider, - ObjectConfig, - Prototype, - ServiceComponent, -) +from cm.models import Cluster +from cm.services.transition.dump import dump -def serialize_datetime_fields(obj, fields=None): - """ - Modifies fields of type datetime to ISO string - - :param obj: Object in dictionary format - :type obj: dict - :param fields: List of fields in datetime format - :type fields: list - """ - if fields is not None: - for field in fields: - obj[field] = obj[field].isoformat() - - -def get_object(model, object_id, fields, datetime_fields=None): - """ - The object is returned in 
dictionary format - - :param model: Type object - :param object_id: Object ID - :type object_id: int - :param fields: List of fields - :type fields: tuple - :param datetime_fields: List of fields in datetime format - :type datetime_fields: list - :return: Object in dictionary format - :rtype: dict - """ - obj = model.objects.values(*fields).get(id=object_id) - serialize_datetime_fields(obj, datetime_fields) - return obj - - -def get_objects(model, fields, filters, datetime_fields=None): - objects = list(model.objects.filters(**filters).values(*fields)) - for obj in objects: - serialize_datetime_fields(obj, datetime_fields) - return objects - - -def get_bundle(prototype_id): - """ - Returns bundle object in dictionary format - - :param prototype_id: Prototype object ID - :type prototype_id: int - :return: Bundle object - :rtype: dict - """ - fields = ("name", "version", "edition", "hash", "description") - prototype = Prototype.objects.get(id=prototype_id) - return get_object(Bundle, prototype.bundle_id, fields) - - -def get_bundle_hash(prototype_id): - """ - Returns the hash of the bundle - - :param prototype_id: Object ID - :type prototype_id: int - :return: The hash of the bundle - :rtype: str - """ - bundle = get_bundle(prototype_id) - return bundle["hash"] - - -def get_config(object_config_id): - """ - Returns current and previous config - - :param object_config_id: - :type object_config_id: int - :return: Current and previous config in dictionary format - :rtype: dict - """ - fields = ("config", "attr", "date", "description") - try: - object_config = ObjectConfig.objects.get(id=object_config_id) - except ObjectConfig.DoesNotExist: - return None - config = {} - for name in ["current", "previous"]: - _id = getattr(object_config, name) - if _id: - config[name] = get_object(ConfigLog, _id, fields, ["date"]) - else: - config[name] = None - return config - - -def get_groups(object_id, model_name): - """Return list of groups. 
Each group contain dictionary with all needed information - - :param object_id: Object ID - :type object_id: int - :param model_name: name of Type Object - :type model_name: str - :return: List with GroupConfig on that object in dict format - :rtype: list - """ - - fields = ("object_id", "name", "description", "config", "object_type") - groups = [] - for group_config in GroupConfig.objects.filter(object_id=object_id, object_type__model=model_name): - group = get_object(GroupConfig, group_config.id, fields) - group["config"] = get_config(group["config"]) - group["model_name"] = model_name - group["hosts"] = [host.id for host in group_config.hosts.order_by("id")] - groups.append(group) - - return groups - - -def get_cluster(cluster_id): - """ - Returns cluster object in dictionary format - - :param cluster_id: Object ID - :type cluster_id: int - :return: Cluster object - :rtype: dict - """ - fields = ( - "id", - "name", - "description", - "config", - "state", - "prototype", - "_multi_state", - ) - cluster = get_object(Cluster, cluster_id, fields) - cluster["config"] = get_config(cluster["config"]) - bundle = get_bundle(cluster.pop("prototype")) - cluster["bundle_hash"] = bundle["hash"] - return cluster, bundle - - -def get_provider(provider_id): - """ - Returns provider object in dictionary format - - :param provider_id: Object ID - :type provider_id: int - :return: Provider object - :rtype: dict - """ - fields = ( - "id", - "prototype", - "name", - "description", - "config", - "state", - "_multi_state", - ) - provider = get_object(HostProvider, provider_id, fields) - provider["config"] = get_config(provider["config"]) - bundle = get_bundle(provider.pop("prototype")) - provider["bundle_hash"] = bundle["hash"] - return provider, bundle - - -def get_host(host_id): - """ - Returns host object in dictionary format - - :param host_id: Object ID - :type host_id: int - :return: Host object - :rtype: dict - """ - fields = ( - "id", - "prototype", - "fqdn", - "description", - 
"provider", - "provider__name", - "config", - "state", - "_multi_state", - ) - host = get_object(Host, host_id, fields) - host["config"] = get_config(host["config"]) - host["bundle_hash"] = get_bundle_hash(host.pop("prototype")) - return host - - -def get_service(service_id): - """ - Returns service object in dictionary format - - :param service_id: Object ID - :type service_id: int - :return: Service object - :rtype: dict - """ - fields = ( - "id", - "prototype", - "prototype__name", - "config", - "state", - "_multi_state", - ) - service = get_object(ClusterObject, service_id, fields) - service["config"] = get_config(service["config"]) - service["bundle_hash"] = get_bundle_hash(service.pop("prototype")) - return service - - -def get_component(component_id): - """ - Returns component object in dictionary format - - :param component_id: Object ID - :type component_id: int - :return: Component object - :rtype: dict - """ - fields = ( - "id", - "prototype", - "prototype__name", - "service", - "config", - "state", - "_multi_state", - ) - component = get_object(ServiceComponent, component_id, fields) - component["config"] = get_config(component["config"]) - component["bundle_hash"] = get_bundle_hash(component.pop("prototype")) - return component - - -def get_host_component(host_component_id): - """ - Returns host_component object in dictionary format - - :param host_component_id: Object ID - :type host_component_id: int - :return: HostComponent object - :rtype: dict - """ - fields = ( - "cluster", - "host", - "service", - "component", - "state", - ) - return get_object(HostComponent, host_component_id, fields) - - -def encrypt_data(pass_from_user, result): +def encrypt_data(pass_from_user: str, result: str) -> bytes: password = pass_from_user.encode() kdf = PBKDF2HMAC( algorithm=hashes.SHA256(), @@ -305,83 +37,7 @@ def encrypt_data(pass_from_user, result): ) key = base64.urlsafe_b64encode(kdf.derive(password)) f = Fernet(key) - return f.encrypt(result) - - -def 
dump(cluster_id, output): - """ - Saving objects to file in JSON format - - :param cluster_id: Object ID - :type cluster_id: int - :param output: Path to file - :type output: str - """ - cluster, bundle = get_cluster(cluster_id) - - data = { - "ADCM_VERSION": settings.ADCM_VERSION, - "bundles": { - bundle["hash"]: bundle, - }, - "cluster": cluster, - "hosts": [], - "providers": [], - "services": [], - "components": [], - "host_components": [], - "groups": [], - } - - provider_ids = set() - data["groups"].extend(get_groups(cluster_id, "cluster")) - - for host_obj in Host.objects.filter(cluster_id=cluster["id"]): - host = get_host(host_obj.id) - provider_ids.add(host["provider"]) - data["hosts"].append(host) - - host_ids = [host["id"] for host in data["hosts"]] - - for provider_obj in HostProvider.objects.filter(id__in=provider_ids): - provider, bundle = get_provider(provider_obj.id) - data["providers"].append(provider) - data["groups"].extend(get_groups(provider_obj.id, "hostprovider")) - data["bundles"][bundle["hash"]] = bundle - - for service_obj in ClusterObject.objects.filter(cluster_id=cluster["id"]): - service = get_service(service_obj.id) - data["groups"].extend(get_groups(service_obj.id, "clusterobject")) - data["services"].append(service) - - service_ids = [service["id"] for service in data["services"]] - - for component_obj in ServiceComponent.objects.filter(cluster_id=cluster["id"], service_id__in=service_ids): - component = get_component(component_obj.id) - data["groups"].extend(get_groups(component_obj.id, "servicecomponent")) - data["components"].append(component) - - component_ids = [component["id"] for component in data["components"]] - - for host_component_obj in HostComponent.objects.filter( - cluster_id=cluster["id"], - host_id__in=host_ids, - service_id__in=service_ids, - component_id__in=component_ids, - ): - host_component = get_host_component(host_component_obj.id) - data["host_components"].append(host_component) - data["adcm_password"] = 
settings.ANSIBLE_SECRET - result = json.dumps(data, indent=2).encode(settings.ENCODING_UTF_8) - password = getpass.getpass() - encrypted = encrypt_data(password, result) - - if output is not None: - with Path(output).open(mode="wb") as f: - f.write(encrypted) - sys.stdout.write(f"Dump successfully done to file {output}\n") - else: - sys.stdout.write(encrypted.decode(settings.ENCODING_UTF_8)) + return f.encrypt(result.encode("utf-8")) class Command(BaseCommand): @@ -409,8 +65,21 @@ def add_arguments(self, parser): ) parser.add_argument("-o", "--output", help="Specifies file to which the output is written.") - def handle(self, *args, **options): # noqa: ARG002 - """Handler method""" - cluster_id = options["cluster_id"] - output = options["output"] - dump(cluster_id, output) + def handle(self, *_, cluster_id: int, output: str | None = None, **_kw) -> None: + if not Cluster.objects.filter(id=cluster_id).exists(): + message = f"Cluster with {cluster_id} doesn't exist" + raise ValueError(message) + + data_string = dump(cluster_id=cluster_id).model_dump_json() + + password = getpass.getpass() + + encrypted = encrypt_data(password, data_string) + + if output is not None: + with Path(output).open(mode="wb") as f: + f.write(encrypted) + + sys.stdout.write(f"Dump successfully done to file {output}\n") + else: + sys.stdout.write(encrypted.decode(settings.ENCODING_UTF_8)) diff --git a/python/cm/management/commands/loadcluster.py b/python/cm/management/commands/loadcluster.py index 91fc85d863..070b3f86b2 100644 --- a/python/cm/management/commands/loadcluster.py +++ b/python/cm/management/commands/loadcluster.py @@ -10,368 +10,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from datetime import datetime from pathlib import Path import sys -import json import base64 import getpass -from ansible.parsing.vault import VaultAES256, VaultSecret -from cryptography.fernet import Fernet, InvalidToken +from cryptography.fernet import Fernet from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC from django.conf import settings -from django.contrib.contenttypes.models import ContentType from django.core.management.base import BaseCommand from django.db.transaction import atomic -from django.db.utils import IntegrityError -from cm.adcm_config.config import save_file_type -from cm.errors import AdcmEx -from cm.models import ( - Bundle, - Cluster, - ClusterObject, - ConfigLog, - GroupConfig, - Host, - HostComponent, - HostProvider, - ObjectConfig, - Prototype, - PrototypeConfig, - ServiceComponent, -) +from cm.services.transition.load import load +from cm.services.transition.types import TransitionPayload -OLD_ADCM_PASSWORD = None - -def deserializer_datetime_fields(obj, fields=None): - """ - Modifies fields of type ISO string to datetime type - - :param obj: Object in dictionary format - :type obj: dict - :param fields: List of fields in ISO string format - :type fields: list - """ - if obj is not None and fields is not None: - for field in fields: - obj[field] = datetime.fromisoformat(obj[field]) - - -def get_prototype(**kwargs): - """ - Returns prototype object - - :param kwargs: Parameters for finding a prototype - :return: Prototype object - :rtype: models.Prototype - """ - bundle = Bundle.objects.get(hash=kwargs.pop("bundle_hash")) - return Prototype.objects.get(bundle=bundle, **kwargs) - - -def create_config(config, prototype=None): - """ - Creating current ConfigLog, previous ConfigLog and ObjectConfig objects - - :param config: ConfigLog object in dictionary format - :type config: dict - :return: ObjectConfig object - 
:rtype: models.ObjectConfig - """ - if config is not None: - current_config = process_config(prototype, config["current"]) - deserializer_datetime_fields(current_config, ["date"]) - previous_config = process_config(prototype, config["previous"]) - deserializer_datetime_fields(previous_config, ["date"]) - - conf = ObjectConfig.objects.create(current=0, previous=0) - - current = ConfigLog.objects.create(obj_ref=conf, **current_config) - current_id = current.id - if previous_config is not None: - previous = ConfigLog.objects.create(obj_ref=conf, **previous_config) - previous_id = previous.id - else: - previous_id = 0 - - conf.current = current_id - conf.previous = previous_id - conf.save() - return conf - return None - - -def create_group(group, ex_hosts_list, obj): - """ - Creating GroupConfig object - - :param group: GroupConfig object in dictionary format - :type group: dict - :param ex_hosts_list: Map of ex_host_ids and new hosts - :type ex_hosts_list: dict - :return: GroupConfig object - :rtype: models.GroupConfig - """ - model_name = group.pop("model_name") - ex_object_id = group.pop("object_id") - group.pop("object_type") - config = create_config(group.pop("config")) - hosts = [] - - for host in group.pop("hosts"): - hosts.append(ex_hosts_list[host]) - - group_config = GroupConfig.objects.create( - object_id=obj.id, - config=config, - object_type=ContentType.objects.get(model=model_name), - **group, - ) - group_config.hosts.set(hosts) - - return ex_object_id, group_config - - -def switch_encoding(msg): - ciphertext = msg - if settings.ANSIBLE_VAULT_HEADER in msg: - _, ciphertext = msg.split("\n") - vault = VaultAES256() - secret_old = VaultSecret(bytes(OLD_ADCM_PASSWORD, settings.ENCODING_UTF_8)) - data = str(vault.decrypt(ciphertext, secret_old), settings.ENCODING_UTF_8) - secret_new = VaultSecret(bytes(settings.ANSIBLE_SECRET, settings.ENCODING_UTF_8)) - ciphertext = vault.encrypt(bytes(data, settings.ENCODING_UTF_8), secret_new) - return 
f"{settings.ANSIBLE_VAULT_HEADER}\n{str(ciphertext, settings.ENCODING_UTF_8)}" - - -def process_config(proto, config): - if config is not None and proto is not None: - conf = config["config"] - for pconf in PrototypeConfig.objects.filter(prototype=proto, type__in=("secrettext", "password")): - if pconf.subname and conf[pconf.name][pconf.subname]: - conf[pconf.name][pconf.subname] = switch_encoding(conf[pconf.name][pconf.subname]) - elif conf.get(pconf.name) and not pconf.subname: - conf[pconf.name] = switch_encoding(conf[pconf.name]) - config["config"] = conf - return config - - -def create_file_from_config(obj, config): - if config is None: - return - - conf = config["current"]["config"] - - for pconf in PrototypeConfig.objects.filter(prototype=obj.prototype, type="file"): - if pconf.subname: - if pconf.subname in conf.get(pconf.name, {}): - save_file_type(obj, pconf.name, pconf.subname, conf[pconf.name][pconf.subname]) - elif pconf.name in conf: - save_file_type(obj, pconf.name, "", conf[pconf.name]) - - -def create_cluster(cluster): - """ - Creating Cluster object - - :param cluster: Cluster object in dictionary format - :type cluster: dict - :return: Cluster object - :rtype: models.Cluster - """ - try: - Cluster.objects.get(name=cluster["name"]) - raise AdcmEx("CLUSTER_CONFLICT", "Cluster with the same name already exist") - except Cluster.DoesNotExist: - prototype = get_prototype(bundle_hash=cluster.pop("bundle_hash"), type="cluster") - ex_id = cluster.pop("id") - config = cluster.pop("config") - cluster = Cluster.objects.create(prototype=prototype, config=create_config(config, prototype), **cluster) - create_file_from_config(cluster, config) - return ex_id, cluster - - -def create_provider(provider): - """ - Creating HostProvider object - - :param provider: HostProvider object in dictionary format - :type provider: dict - :return: HostProvider object - :rtype: models.HostProvider - """ - bundle_hash = provider.pop("bundle_hash") - ex_id = provider.pop("id") - 
try: - same_name_provider = HostProvider.objects.get(name=provider["name"]) - if same_name_provider.prototype.bundle.hash != bundle_hash: - raise IntegrityError("Name of provider already in use in another bundle") - create_file_from_config(same_name_provider, provider["config"]) # noqa: TRY300 - return ex_id, same_name_provider - except HostProvider.DoesNotExist: - prototype = get_prototype(bundle_hash=bundle_hash, type="provider") - config = provider.pop("config") - provider = HostProvider.objects.create( - prototype=prototype, - config=create_config(config, prototype), - **provider, - ) - create_file_from_config(provider, config) - return ex_id, provider - - -def create_host(host, cluster): - """ - Creating Host object - - :param host: Host object in dictionary format - :type host: dict - :param cluster: Cluster object - :type cluster: models.Cluster - :return: Host object - :rtype: models.Host - """ - host.pop("provider") - provider = HostProvider.objects.get(name=host.pop("provider__name")) - try: - Host.objects.get(fqdn=host["fqdn"]) - provider.delete() - cluster.delete() - raise AdcmEx("HOST_CONFLICT", "Host fqdn already in use") - except Host.DoesNotExist: - prototype = get_prototype(bundle_hash=host.pop("bundle_hash"), type="host") - ex_id = host.pop("id") - config = host.pop("config") - new_host = Host.objects.create( - prototype=prototype, - provider=provider, - config=create_config(config, prototype), - cluster=cluster, - **host, - ) - create_file_from_config(new_host, config) - return ex_id, new_host - - -def create_service(service, cluster): - """ - Creating Service object - - :param service: ClusterObject object in dictionary format - :type service: dict - :param cluster: Cluster object - :type cluster: models.Cluster - :return: ClusterObject object - :rtype: models.ClusterObject - """ - prototype = get_prototype( - bundle_hash=service.pop("bundle_hash"), - type="service", - name=service.pop("prototype__name"), - ) - ex_id = service.pop("id") - config 
= service.pop("config") - service = ClusterObject.objects.create( - prototype=prototype, - cluster=cluster, - config=create_config(config, prototype), - **service, - ) - create_file_from_config(service, config) - return ex_id, service - - -def create_component(component, cluster, service): - """ - Creating Component object - - :param component: ServiceComponent object in dictionary format - :type component: dict - :param cluster: Cluster object - :type cluster: models.Cluster - :param service: Service object - :type service: models.ClusterObject - :return: Component object - :rtype: models.ServiceComponent - """ - prototype = get_prototype( - bundle_hash=component.pop("bundle_hash"), - type="component", - name=component.pop("prototype__name"), - parent=service.prototype, - ) - ex_id = component.pop("id") - config = component.pop("config") - component = ServiceComponent.objects.create( - prototype=prototype, - cluster=cluster, - service=service, - config=create_config(config, prototype), - **component, - ) - create_file_from_config(component, config) - return ex_id, component - - -def create_host_component(host_component, cluster, host, service, component): - """ - Creating HostComponent object - - :param host_component: HostComponent object in dictionary format - :type host_component: dict - :param cluster: Cluster object - :type cluster: models.Cluster - :param host: Host object - :type host: models.Host - :param service: Service object - :type service: models.ClusterObject - :param component: Component object - :type component: models.ServiceComponent - :return: HostComponent object - :rtype: models.HostComponent - """ - host_component.pop("cluster") - return HostComponent.objects.create( - cluster=cluster, - host=host, - service=service, - component=component, - **host_component, - ) - - -def check(data): - """ - Checking cluster load - - :param data: Data from file - :type data: dict - """ - if data["ADCM_VERSION"] != settings.ADCM_VERSION: - raise AdcmEx( - 
"DUMP_LOAD_ADCM_VERSION_ERROR", - msg=( - f"ADCM versions do not match, dump version: {data['ADCM_VERSION']}," - f" load version: {settings.ADCM_VERSION}" - ), - ) - - for bundle_hash, bundle in data["bundles"].items(): - try: - Bundle.objects.get(hash=bundle_hash) - except Bundle.DoesNotExist as err: - raise AdcmEx( - "DUMP_LOAD_BUNDLE_ERROR", - msg=f"Bundle '{bundle['name']} {bundle['version']}' not found", - ) from err - - -def decrypt_file(pass_from_user, file): +def decrypt_file(pass_from_user: str, file: str) -> bytes: password = pass_from_user.encode() kdf = PBKDF2HMAC( algorithm=hashes.SHA256(), @@ -384,94 +40,49 @@ def decrypt_file(pass_from_user, file): return Fernet(key).decrypt(file.encode()) -def set_old_password(password): - global OLD_ADCM_PASSWORD - OLD_ADCM_PASSWORD = password - - -@atomic -def load(file_path): +class Command(BaseCommand): """ - Loading and creating objects from JSON file + Command for load cluster object from JSON file - :param file_path: Path to JSON file - :type file_path: str + Example: + manage.py loadcluster cluster.json """ - try: - password = getpass.getpass() - with Path(file_path).open(encoding=settings.ENCODING_UTF_8) as f: - encrypted = f.read() - decrypted = decrypt_file(password, encrypted) - data = json.loads(decrypted.decode(settings.ENCODING_UTF_8)) - except FileNotFoundError as err: - raise AdcmEx("DUMP_LOAD_CLUSTER_ERROR", msg="Loaded file not found") from err - except InvalidToken as err: - raise AdcmEx("WRONG_PASSWORD") from err - check(data) - set_old_password(data["adcm_password"]) - _, cluster = create_cluster(data["cluster"]) + help = "Load cluster object from JSON format" - ex_provider_ids = {} - for provider_data in data["providers"]: - ex_provider_id, provider = create_provider(provider_data) - ex_provider_ids[ex_provider_id] = provider + def add_arguments(self, parser): + """Parsing command line arguments""" + parser.add_argument("file_path", nargs="?") - ex_host_ids = {} - for host_data in 
data["hosts"]: - ex_host_id, host = create_host(host_data, cluster) - ex_host_ids[ex_host_id] = host + def handle(self, *_, file_path: str, **_kw): # noqa: ARG002 + encrypted_dump = Path(file_path) - ex_service_ids = {} - for service_data in data["services"]: - ex_service_id, service = create_service(service_data, cluster) - ex_service_ids[ex_service_id] = service + if not encrypted_dump.is_file(): + message = f"Dump file doesn't exist or isn't a file at {encrypted_dump}" + raise ValueError(message) - ex_component_ids = {} - for component_data in data["components"]: - ex_component_id, component = create_component( - component_data, - cluster, - ex_service_ids[component_data.pop("service")], - ) - ex_component_ids[ex_component_id] = component + password = getpass.getpass() - for host_component_data in data["host_components"]: - create_host_component( - host_component_data, - cluster, - ex_host_ids[host_component_data.pop("host")], - ex_service_ids[host_component_data.pop("service")], - ex_component_ids[host_component_data.pop("component")], - ) - for group_data in data["groups"]: - if group_data["model_name"] == "cluster": - obj = cluster - elif group_data["model_name"] == "clusterobject": - obj = ex_service_ids[group_data["object_id"]] - elif group_data["model_name"] == "servicecomponent": - obj = ex_component_ids[group_data["object_id"]] - elif group_data["model_name"] == "hostprovider": - obj = ex_provider_ids[group_data["object_id"]] - create_group(group_data, ex_host_ids, obj) - sys.stdout.write(f"Load successfully ended, cluster {cluster.display_name} created\n") + self._write("Decrypting dump file...") + decrypted_string = decrypt_file(password, encrypted_dump.read_text()).decode("utf-8") + self._write("Validating data...") + payload = TransitionPayload.model_validate_json(decrypted_string) -class Command(BaseCommand): - """ - Command for load cluster object from JSON file + if payload.adcm_version != settings.ADCM_VERSION: + message = ( + f"ADCM versions do 
not match, dump version: {payload.adcm_version}, " + f"load version: {settings.ADCM_VERSION}" + ) + raise ValueError(message) - Example: - manage.py loadcluster cluster.json - """ + with atomic(): + cluster_id = load(data=payload, report=self._write) - help = "Load cluster object from JSON format" + sys.stdout.write(f"Load successfully ended, cluster {payload.cluster.name} created with id {cluster_id}\n") - def add_arguments(self, parser): - """Parsing command line arguments""" - parser.add_argument("file_path", nargs="?") + def _write(self, line: str) -> None: + if not line.endswith("\n"): + line += "\n" - def handle(self, *args, **options): # noqa: ARG002 - """Handler method""" - file_path = options.get("file_path") - load(file_path) + sys.stdout.write(line) diff --git a/python/cm/services/transition/dump.py b/python/cm/services/transition/dump.py index 5fab1c8638..01a4ef5bdf 100644 --- a/python/cm/services/transition/dump.py +++ b/python/cm/services/transition/dump.py @@ -37,7 +37,6 @@ GroupConfig, Host, HostComponent, - HostInfo, HostProvider, MaintenanceMode, ServiceComponent, @@ -49,6 +48,7 @@ ClusterInfo, ComponentInfo, ConfigHostGroupInfo, + HostInfo, HostProviderInfo, NamedMappingEntry, RestorableCondition, @@ -224,7 +224,7 @@ def retrieve_cluster( | Q(object_type=service_ct, object_id__in=service_id_name_map) | Q(object_type=component_ct, object_id__in=component_id_name_map) ) - .annotate(current_config_id=F("config__current_id")) + .annotate(current_config_id=F("config__current")) .select_related("object_type") ): group_info = ConfigHostGroupInfo(name=group.name, description=group.description) @@ -260,9 +260,7 @@ def retrieve_bundles_info(ids: set[BundleID]) -> dict[BundleHash, BundleExtraInf def fill_configurations(config_acc: ConfigUpdateAcc) -> None: secrets = AnsibleSecrets() - for config_id, config, attr in ( - ConfigLog.objects.filter(id__in=config_acc).values_list("id", "config", "attr").iterator(chunk_size=20) - ): + for config_id, config, attr 
in ConfigLog.objects.filter(id__in=config_acc).values_list("id", "config", "attr"): target = config_acc[config_id] target.config = secrets.reveal_secrets(config) target.attr = attr diff --git a/python/cm/services/transition/load.py b/python/cm/services/transition/load.py index f99d05609d..98e1f483d1 100644 --- a/python/cm/services/transition/load.py +++ b/python/cm/services/transition/load.py @@ -59,16 +59,16 @@ HostNameIDMap: TypeAlias = dict[HostName, HostID] -def load(data: TransitionPayload, report: Callable[[str], None] = print) -> None: +def load(data: TransitionPayload, report: Callable[[str], None] = print) -> ClusterID: report("Load started...") report("Bundles discovery") - bundles = discover_bundles(data.bundles) + bundles = discover_bundles(data.bundles.keys()) if len(bundles) != len(data.bundles): - missing_bundles = [ + missing_bundles = "\n".join( str(data.bundles[missing_bundle_hash]) for missing_bundle_hash in set(data.bundles).difference(bundles) - ] - report(f"Not all bundles are installed.\nMissing:\n{'\n'.join(missing_bundles)}") + ) + report(f"Not all bundles are installed.\nMissing:\n{missing_bundles}") message = "Bundles are missing in this ADCM" raise RuntimeError(message) @@ -78,20 +78,19 @@ def load(data: TransitionPayload, report: Callable[[str], None] = print) -> None report(f"Some Host Providers exist, they will be used to create hosts from them: {', '.join(hostproviders)}") if len(hostproviders) != len(data.hostproviders): - report(f"Host Provider will be created: {', '.join(hp.name for hp in data.hostproviders)}") + missing_hostproviders = tuple(entry for entry in data.hostproviders if entry.name not in hostproviders) + report(f"Host Providers will be created: {', '.join(hp.name for hp in missing_hostproviders)}") - hostproviders |= create_new_hostproviders( - hostproviders=(entry for entry in data.hostproviders if entry.name not in hostproviders), bundles=bundles - ) + hostproviders |= 
create_new_hostproviders(hostproviders=missing_hostproviders, bundles=bundles) report("Hosts creation") hosts = create_new_hosts(hosts=data.hosts, hostproviders=hostproviders) report("Cluster creation") - create_cluster(cluster=data.cluster, bundles=bundles, hosts=hosts) + return create_cluster(cluster=data.cluster, bundles=bundles, hosts=hosts) -def discover_bundles(required_bundles: BundleHash) -> BundleHashIDMap: +def discover_bundles(required_bundles: Iterable[BundleHash]) -> BundleHashIDMap: return dict(Bundle.objects.values_list("hash", "id").filter(hash__in=required_bundles)) @@ -99,7 +98,7 @@ def discover_hostproviders(hostproviders: dict[HostProviderName, BundleHash]) -> result = {} for id_, name, bundle_id, bundle_hash in HostProvider.objects.values_list( - "id", "name", "bundle_id", "bundle__hash" + "id", "name", "prototype__bundle_id", "prototype__bundle__hash" ).filter(name__in=hostproviders): if bundle_hash == hostproviders[name]: result[name] = (id_, bundle_id) @@ -121,10 +120,12 @@ def create_new_hostproviders( for provider_info in hostproviders: bundle_id = bundles[provider_info.bundle] new_provider = add_host_provider( - prototype=provider_protos[bundle_id], name=provider_info.name, description=provider_info.description + prototype=provider_protos[provider_info.bundle], + name=provider_info.name, + description=provider_info.description, ) result[provider_info.name] = (new_provider.id, bundle_id) - _restore_state(target=new_provider, condition=provider_info.state) + _restore_state(target=new_provider, condition=provider_info.condition) return result @@ -154,12 +155,12 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa cluster_object = add_cluster(prototype=cluster_prototype, name=cluster.name, description=cluster.description) services_to_add = Prototype.objects.filter( - bundle_id=bundle_id, type=ObjectType.SERVICE, name__in=(service.name for service in cluster.services) + bundle_id=bundle_id, 
type=ObjectType.SERVICE, name__in=(service.name for service in cluster.services.values()) ) bulk_add_services_to_cluster(cluster=cluster_object, prototypes=services_to_add) - perform_host_to_cluster_map(cluster_id=cluster.pk, hosts=hosts.values(), status_service=notify) + perform_host_to_cluster_map(cluster_id=cluster_object.id, hosts=hosts.values(), status_service=notify) - _restore_state(target=cluster, condition=cluster.condition) + _restore_state(target=cluster_object, condition=cluster.condition) config_host_groups: deque[tuple[Cluster | ClusterObject | ServiceComponent, ConfigHostGroupInfo]] = deque( (cluster_object, group) for group in cluster.host_groups @@ -188,7 +189,7 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa services_in_mm.append(service_object.id) for component_info in service_info.components.values(): - component_object = component_object_mapping[component.name] + component_object = component_object_mapping[component_info.name] _restore_state(target=component_object, condition=component_info.condition) config_host_groups.extend((component_object, group) for group in component_info.host_groups) if component_info.maintenance_mode == "on": @@ -219,7 +220,9 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa if config_host_groups: for owner, group in config_host_groups: - _create_group_config(owner=owner, group=group) + _create_group_config(owner=owner, group=group, hosts=hosts) + + return cluster_object.id def _restore_state( @@ -231,7 +234,8 @@ def _restore_state( ) target.set_state(condition.state) - target.set_multi_state(condition.multi_state) + for multi_state in condition.multi_state: + target.set_multi_state(multi_state) def _create_group_config( diff --git a/python/core/types.py b/python/core/types.py index ec4a62eac8..cf4e1501aa 100644 --- a/python/core/types.py +++ b/python/core/types.py @@ -29,7 +29,9 @@ ConfigID: TypeAlias = int ConcernID: TypeAlias = int 
+HostProviderName: TypeAlias = str HostName: TypeAlias = str +ClusterName: TypeAlias = str ServiceName: TypeAlias = str ComponentName: TypeAlias = str From 45adee3dff6f9fcd48b85cab6423035ef803b725 Mon Sep 17 00:00:00 2001 From: Kirill Fedorenko Date: Fri, 4 Oct 2024 13:11:09 +0000 Subject: [PATCH 86/98] ADCM-6009 [UI] Add the createCrudSlice function https://tracker.yandex.ru/ADCM-6009 --- adcm-web/app/src/models/modal.ts | 14 +++++ .../store/createCrudSlice/createCrudSlice.ts | 58 +++++++++++++++++++ 2 files changed, 72 insertions(+) create mode 100644 adcm-web/app/src/models/modal.ts create mode 100644 adcm-web/app/src/store/createCrudSlice/createCrudSlice.ts diff --git a/adcm-web/app/src/models/modal.ts b/adcm-web/app/src/models/modal.ts new file mode 100644 index 0000000000..c4ac42814f --- /dev/null +++ b/adcm-web/app/src/models/modal.ts @@ -0,0 +1,14 @@ +type ModalStateActionData = { + [P in EntityName]: T | unknown; +} & { + [key: string]: unknown; +}; + +export interface ModalState { + createDialog: { + isOpen: boolean; + }; + updateDialog: ModalStateActionData; + deleteDialog: ModalStateActionData; + isActionInProgress?: boolean; +} diff --git a/adcm-web/app/src/store/createCrudSlice/createCrudSlice.ts b/adcm-web/app/src/store/createCrudSlice/createCrudSlice.ts new file mode 100644 index 0000000000..699a1fcd4b --- /dev/null +++ b/adcm-web/app/src/store/createCrudSlice/createCrudSlice.ts @@ -0,0 +1,58 @@ +import { ModalState } from '@models/modal'; +import { ActionReducerMapBuilder, SliceCaseReducers, ValidateSliceCaseReducers, createSlice } from '@reduxjs/toolkit'; + +type ExtractEntity = S extends ModalState ? 
E : never; + +interface CreateCrudSliceOptions< + EntityName extends string, + S extends ModalState, EntityName>, + CR extends SliceCaseReducers, + Name extends string = string, +> { + name: Name; + entityName: EntityName; + createInitialState: () => S; + reducers: ValidateSliceCaseReducers; + extraReducers?: (builder: ActionReducerMapBuilder) => void; +} + +export function createCrudSlice< + EntityName extends string, + S extends ModalState, string>, + CR extends SliceCaseReducers, +>(options: CreateCrudSliceOptions) { + const { name, entityName, createInitialState, reducers, extraReducers } = options; + + return createSlice({ + name, + initialState: createInitialState, + reducers: { + ...reducers, + setIsActionInProgress(state, action) { + state.isActionInProgress = action.payload; + }, + openCreateDialog(state) { + state.createDialog.isOpen = true; + }, + openUpdateDialog(state, action) { + state.updateDialog[entityName] = action.payload; + }, + openDeleteDialog(state, action) { + state.deleteDialog[entityName] = action.payload; + }, + closeCreateDialog(state) { + state.createDialog.isOpen = false; + }, + closeUpdateDialog(state) { + state.updateDialog[entityName] = null; + }, + closeDeleteDialog(state) { + state.deleteDialog[entityName] = null; + }, + cleanupActions() { + return createInitialState(); + }, + }, + extraReducers, + }); +} From 8a1fbb5a6f3a4251bab9f6e96c1be2d62d9d73fd Mon Sep 17 00:00:00 2001 From: Pavel Nesterovkiy Date: Tue, 8 Oct 2024 08:25:43 +0000 Subject: [PATCH 87/98] feature/ADCM-6001 update node, yarn, added lint stage https://tracker.yandex.ru/ADCM-6001 --- Dockerfile | 2 +- adcm-web/app/.husky/pre-commit | 11 +- adcm-web/app/.lintstagedrc.js | 28 + adcm-web/app/README.md | 4 +- adcm-web/app/build.sh | 6 +- adcm-web/app/package.json | 7 +- adcm-web/app/yarn.lock | 2972 +++++++++++++++++-------------- adcm-web/docker/test/Dockerfile | 2 +- 8 files changed, 1679 insertions(+), 1353 deletions(-) create mode 100644 
adcm-web/app/.lintstagedrc.js diff --git a/Dockerfile b/Dockerfile index a15a42ff0f..93ff611e59 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,7 +4,7 @@ WORKDIR /code RUN sh -c "make" -FROM node:18.16-alpine AS ui_builder +FROM node:20.9.0-alpine AS ui_builder ARG ADCM_VERSION ENV ADCM_VERSION=$ADCM_VERSION COPY ./adcm-web/app /code diff --git a/adcm-web/app/.husky/pre-commit b/adcm-web/app/.husky/pre-commit index 8e31fd048b..3ab1512f54 100755 --- a/adcm-web/app/.husky/pre-commit +++ b/adcm-web/app/.husky/pre-commit @@ -2,6 +2,11 @@ . "$(dirname "$0")/_/husky.sh" cd adcm-web/app -yarn lint -yarn tsc -echo "lint" + +echo "Start lint check"; + +yarn lint-staged; + +echo "Start TS check"; + +yarn tsc; diff --git a/adcm-web/app/.lintstagedrc.js b/adcm-web/app/.lintstagedrc.js new file mode 100644 index 0000000000..d7638ab84b --- /dev/null +++ b/adcm-web/app/.lintstagedrc.js @@ -0,0 +1,28 @@ +import { ESLint } from 'eslint' + +const eslintCheck = (filenames) => `eslint ${filenames.join(' ')} --ext ts,tsx --report-unused-disable-directives --max-warnings 0`; + +/** + * lint-stage don't understand .eslintignore file + * https://www.curiouslychase.com/posts/eslint-error-file-ignored-because-of-a-matching-ignore-pattern/ + */ +const removeIgnoredFiles = async (files) => { + const eslint = new ESLint() + const isIgnored = await Promise.all( + files.map((file) => { + return eslint.isPathIgnored(file) + }) + ) + return files.filter((_, i) => !isIgnored[i]); +} + +export default { + '*.(js|jsx|ts|tsx)': async (filenames) => { + // Run ESLint on entire repo if more than 10 staged files + if (filenames.length > 10) { + return 'yarn lint' + } + const filesToLint = await removeIgnoredFiles(filenames); + return eslintCheck(filesToLint); + } +} diff --git a/adcm-web/app/README.md b/adcm-web/app/README.md index 0f9ee324a9..d39fdb062a 100644 --- a/adcm-web/app/README.md +++ b/adcm-web/app/README.md @@ -5,8 +5,8 @@ This is documentation for ADCM Frontend project part ## Development ### 
Requirements -1. Node > v18.16.x (recommend use [NVM](https://github.com/nvm-sh/nvm)) -2. yarn > v3.5.x ([yarn v3 install](https://yarnpkg.com/getting-started/install)) +1. Node > 20.9.x (recommend use [NVM](https://github.com/nvm-sh/nvm)) +2. yarn > v4.5.x ([yarn v4 install](https://yarnpkg.com/getting-started/install)) ### Start local Backend ``` diff --git a/adcm-web/app/build.sh b/adcm-web/app/build.sh index a2484ed21a..25be09bc06 100755 --- a/adcm-web/app/build.sh +++ b/adcm-web/app/build.sh @@ -1,5 +1,9 @@ #!/bin/sh -yarn set version berry +# Set Yarn version +YARN_VERSION=4.5.0 + +corepack enable && corepack prepare yarn@$YARN_VERSION +#yarn set version $YARN_VERSION yarn install yarn build --mode production --outDir /wwwroot diff --git a/adcm-web/app/package.json b/adcm-web/app/package.json index 36d8fa45c6..6c896d7e13 100644 --- a/adcm-web/app/package.json +++ b/adcm-web/app/package.json @@ -8,7 +8,7 @@ "build": "tsc && vite build", "lint": "eslint src --ext ts,tsx --report-unused-disable-directives --max-warnings 0", "tsc": "tsc --noEmit", - "postinstall": "[ -z \"`git --version`\" ] && echo \"git dont install\" || (frontSubDir=$(pwd) && cd $(git rev-parse --show-toplevel) && npx husky install \"$frontSubDir/.husky\")", + "postinstall": "[ -z \"`git --version`\" ] && echo \"git dont install\" || (frontSubDir=$(pwd) && cd $(git rev-parse --show-toplevel) && husky \"$frontSubDir/.husky\")", "preview": "vite preview", "storybook": "storybook dev -p 6006", "build-storybook": "storybook build", @@ -76,11 +76,12 @@ "eslint-plugin-react-refresh": "0.4.5", "eslint-plugin-spellcheck": "0.0.20", "eslint-plugin-storybook": "0.8.0", - "husky": "9.0.11", + "husky": "9.1.6", "identity-obj-proxy": "3.0.0", "jest": "29.7.0", "jest-environment-jsdom": "29.7.0", "json-schema": "0.4.0", + "lint-staged": "15.2.10", "prettier": "3.2.5", "prop-types": "15.8.1", "sass": "1.71.1", @@ -93,5 +94,5 @@ "vite-plugin-svgr": "3.2.0", "vite-tsconfig-paths": "4.2.0" }, - 
"packageManager": "yarn@4.0.2" + "packageManager": "yarn@4.5.0" } diff --git a/adcm-web/app/yarn.lock b/adcm-web/app/yarn.lock index db1ccd280c..89cada033d 100644 --- a/adcm-web/app/yarn.lock +++ b/adcm-web/app/yarn.lock @@ -3,19 +3,19 @@ __metadata: version: 8 - cacheKey: 10 + cacheKey: 10c0 "@aashutoshrathi/word-wrap@npm:^1.2.3": version: 1.2.6 resolution: "@aashutoshrathi/word-wrap@npm:1.2.6" - checksum: 6eebd12a5cd03cee38fcb915ef9f4ea557df6a06f642dfc7fe8eb4839eb5c9ca55a382f3604d52c14200b0c214c12af5e1f23d2a6d8e23ef2d016b105a9d6c0a + checksum: 10c0/53c2b231a61a46792b39a0d43bc4f4f776bb4542aa57ee04930676802e5501282c2fc8aac14e4cd1f1120ff8b52616b6ff5ab539ad30aa2277d726444b71619f languageName: node linkType: hard "@adobe/css-tools@npm:^4.3.2": version: 4.3.3 resolution: "@adobe/css-tools@npm:4.3.3" - checksum: 0e77057efb4e18182560855503066b75edca98671be327d3f8a7ae89ec3da6821e693114b55225909fca00d7e7ed8422f3d79d71fe95dd4d5df1f2026a9fda02 + checksum: 10c0/e76e712df713964b87cdf2aca1f0477f19bebd845484d5fcba726d3ec7782366e2f26ec8cb2dcfaf47081a5c891987d8a9f5c3f30d11e1eb3c1848adc27fcb24 languageName: node linkType: hard @@ -25,7 +25,7 @@ __metadata: dependencies: "@jridgewell/gen-mapping": "npm:^0.3.0" "@jridgewell/trace-mapping": "npm:^0.3.9" - checksum: e15fecbf3b54c988c8b4fdea8ef514ab482537e8a080b2978cc4b47ccca7140577ca7b65ad3322dcce65bc73ee6e5b90cbfe0bbd8c766dad04d5c62ec9634c42 + checksum: 10c0/92ce5915f8901d8c7cd4f4e6e2fe7b9fd335a29955b400caa52e0e5b12ca3796ada7c2f10e78c9c5b0f9c2539dff0ffea7b19850a56e1487aa083531e1e46d43 languageName: node linkType: hard @@ -36,7 +36,7 @@ __metadata: default-browser-id: "npm:3.0.0" bin: x-default-browser: bin/x-default-browser.js - checksum: f7111a6f00953f32d344a05c9a1bc1f22124dfc2696b2b7906ca856a9f845a282f272f603c997ebbb8a2d6b865664f46fda3bec1c480f040e21b815ff8ed3607 + checksum: 10c0/634c7fad7a5f4df86e3fcd3a11e50034fcb6f6302281569727574cbda7532850063cb34ec328384a686ab0812f297bf301a5e2450bc7b93b5f80a006b1f2dfd7 languageName: node 
linkType: hard @@ -46,14 +46,14 @@ __metadata: dependencies: "@babel/highlight": "npm:^7.23.4" chalk: "npm:^2.4.2" - checksum: 44e58529c9d93083288dc9e649c553c5ba997475a7b0758cc3ddc4d77b8a7d985dbe78cc39c9bbc61f26d50af6da1ddf0a3427eae8cc222a9370619b671ed8f5 + checksum: 10c0/a10e843595ddd9f97faa99917414813c06214f4d9205294013e20c70fbdf4f943760da37dec1d998bf3e6fc20fa2918a47c0e987a7e458663feb7698063ad7c6 languageName: node linkType: hard "@babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.23.3, @babel/compat-data@npm:^7.23.5": version: 7.23.5 resolution: "@babel/compat-data@npm:7.23.5" - checksum: 088f14f646ecbddd5ef89f120a60a1b3389a50a9705d44603dca77662707d0175a5e0e0da3943c3298f1907a4ab871468656fbbf74bb7842cd8b0686b2c19736 + checksum: 10c0/081278ed46131a890ad566a59c61600a5f9557bd8ee5e535890c8548192532ea92590742fd74bd9db83d74c669ef8a04a7e1c85cdea27f960233e3b83c3a957c languageName: node linkType: hard @@ -76,7 +76,7 @@ __metadata: gensync: "npm:^1.0.0-beta.2" json5: "npm:^2.2.3" semver: "npm:^6.3.1" - checksum: 268cdbb86bef1b8ea5b1300f2f325e56a1740a5051360cb228ffeaa0f80282b6674f3a2b4d6466adb0691183759b88d4c37b4a4f77232c84a49ed771c84cdc27 + checksum: 10c0/03883300bf1252ab4c9ba5b52f161232dd52873dbe5cde9289bb2bb26e935c42682493acbac9194a59a3b6cbd17f4c4c84030db8d6d482588afe64531532ff9b languageName: node linkType: hard @@ -88,7 +88,7 @@ __metadata: "@jridgewell/gen-mapping": "npm:^0.3.2" "@jridgewell/trace-mapping": "npm:^0.3.17" jsesc: "npm:^2.5.1" - checksum: 864090d5122c0aa3074471fd7b79d8a880c1468480cbd28925020a3dcc7eb6e98bedcdb38983df299c12b44b166e30915b8085a7bc126e68fa7e2aadc7bd1ac5 + checksum: 10c0/53540e905cd10db05d9aee0a5304e36927f455ce66f95d1253bb8a179f286b88fa7062ea0db354c566fe27f8bb96567566084ffd259f8feaae1de5eccc8afbda languageName: node linkType: hard @@ -97,7 +97,7 @@ __metadata: resolution: "@babel/helper-annotate-as-pure@npm:7.22.5" dependencies: "@babel/types": "npm:^7.22.5" - checksum: 
53da330f1835c46f26b7bf4da31f7a496dee9fd8696cca12366b94ba19d97421ce519a74a837f687749318f94d1a37f8d1abcbf35e8ed22c32d16373b2f6198d + checksum: 10c0/5a80dc364ddda26b334bbbc0f6426cab647381555ef7d0cd32eb284e35b867c012ce6ce7d52a64672ed71383099c99d32765b3d260626527bb0e3470b0f58e45 languageName: node linkType: hard @@ -106,7 +106,7 @@ __metadata: resolution: "@babel/helper-builder-binary-assignment-operator-visitor@npm:7.22.15" dependencies: "@babel/types": "npm:^7.22.15" - checksum: 639c697a1c729f9fafa2dd4c9af2e18568190299b5907bd4c2d0bc818fcbd1e83ffeecc2af24327a7faa7ac4c34edd9d7940510a5e66296c19bad17001cf5c7a + checksum: 10c0/2535e3824ca6337f65786bbac98e562f71699f25532cecd196f027d7698b4967a96953d64e36567956658ad1a05ccbdc62d1ba79ee751c79f4f1d2d3ecc2e01c languageName: node linkType: hard @@ -119,7 +119,7 @@ __metadata: browserslist: "npm:^4.22.2" lru-cache: "npm:^5.1.1" semver: "npm:^6.3.1" - checksum: 05595cd73087ddcd81b82d2f3297aac0c0422858dfdded43d304786cf680ec33e846e2317e6992d2c964ee61d93945cbf1fa8ec80b55aee5bfb159227fb02cb9 + checksum: 10c0/ba38506d11185f48b79abf439462ece271d3eead1673dd8814519c8c903c708523428806f05f2ec5efd0c56e4e278698fac967e5a4b5ee842c32415da54bc6fa languageName: node linkType: hard @@ -138,7 +138,7 @@ __metadata: semver: "npm:^6.3.1" peerDependencies: "@babel/core": ^7.0.0 - checksum: 8b9f02526eeb03ef1d2bc89e3554377ae966b33a74078ab1f88168dfa725dc206ea5ecf4cf417c3651d8a6b3c70204f6939a9aa0401be3d0d32ddbf6024ea3c7 + checksum: 10c0/f30437aa16f3585cc3382ea630f24457ef622c22f5e4eccffbc03f6a81efbef0b6714fb5a78baa64c838884ba7e1427e3280d7b27481b9f587bc8fbbed05dd36 languageName: node linkType: hard @@ -151,7 +151,7 @@ __metadata: semver: "npm:^6.3.1" peerDependencies: "@babel/core": ^7.0.0 - checksum: 886b675e82f1327b4f7a2c69a68eefdb5dbb0b9d4762c2d4f42a694960a9ccf61e1a3bcad601efd92c110033eb1a944fcd1e5cac188aa6b2e2076b541e210e20 + checksum: 
10c0/8eba4c1b7b94a83e7a82df5c3e504584ff0ba6ab8710a67ecc2c434a7fb841a29c2f5c94d2de51f25446119a1df538fa90b37bd570db22ddd5e7147fe98277c6 languageName: node linkType: hard @@ -166,14 +166,14 @@ __metadata: resolve: "npm:^1.14.2" peerDependencies: "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: f849e816ec4b182a3e8fa8e09ff016f88bb95259cd6b2190b815c48f83c3d3b68e973a8ec72acc5086bfe93705cbd46ec089c06476421d858597780e42235a03 + checksum: 10c0/2b053b96a0c604a7e0f5c7d13a8a55f4451d938f7af42bd40f62a87df15e6c87a0b1dbd893a0f0bb51077b54dc3ba00a58b166531a5940ad286ab685dd8979ec languageName: node linkType: hard "@babel/helper-environment-visitor@npm:^7.22.20": version: 7.22.20 resolution: "@babel/helper-environment-visitor@npm:7.22.20" - checksum: d80ee98ff66f41e233f36ca1921774c37e88a803b2f7dca3db7c057a5fea0473804db9fb6729e5dbfd07f4bed722d60f7852035c2c739382e84c335661590b69 + checksum: 10c0/e762c2d8f5d423af89bd7ae9abe35bd4836d2eb401af868a63bbb63220c513c783e25ef001019418560b3fdc6d9a6fb67e6c0b650bcdeb3a2ac44b5c3d2bdd94 languageName: node linkType: hard @@ -183,7 +183,7 @@ __metadata: dependencies: "@babel/template": "npm:^7.22.15" "@babel/types": "npm:^7.23.0" - checksum: 7b2ae024cd7a09f19817daf99e0153b3bf2bc4ab344e197e8d13623d5e36117ed0b110914bc248faa64e8ccd3e97971ec7b41cc6fd6163a2b980220c58dcdf6d + checksum: 10c0/d771dd1f3222b120518176733c52b7cadac1c256ff49b1889dbbe5e3fed81db855b8cc4e40d949c9d3eae0e795e8229c1c8c24c0e83f27cfa6ee3766696c6428 languageName: node linkType: hard @@ -192,7 +192,7 @@ __metadata: resolution: "@babel/helper-hoist-variables@npm:7.22.5" dependencies: "@babel/types": "npm:^7.22.5" - checksum: 394ca191b4ac908a76e7c50ab52102669efe3a1c277033e49467913c7ed6f7c64d7eacbeabf3bed39ea1f41731e22993f763b1edce0f74ff8563fd1f380d92cc + checksum: 10c0/60a3077f756a1cd9f14eb89f0037f487d81ede2b7cfe652ea6869cd4ec4c782b0fb1de01b8494b9a2d2050e3d154d7d5ad3be24806790acfb8cbe2073bf1e208 languageName: node linkType: hard @@ -201,7 +201,7 @@ __metadata: resolution: 
"@babel/helper-member-expression-to-functions@npm:7.23.0" dependencies: "@babel/types": "npm:^7.23.0" - checksum: 325feb6e200478c8cd6e10433fabe993a7d3315cc1a2a457e45514a5f95a73dff4c69bea04cc2daea0ffe72d8ed85d504b3f00b2e0767b7d4f5ae25fec9b35b2 + checksum: 10c0/b810daddf093ffd0802f1429052349ed9ea08ef7d0c56da34ffbcdecbdafac86f95bdea2fe30e0e0e629febc7dd41b56cb5eacc10d1a44336d37b755dac31fa4 languageName: node linkType: hard @@ -210,7 +210,7 @@ __metadata: resolution: "@babel/helper-module-imports@npm:7.22.15" dependencies: "@babel/types": "npm:^7.22.15" - checksum: 5ecf9345a73b80c28677cfbe674b9f567bb0d079e37dcba9055e36cb337db24ae71992a58e1affa9d14a60d3c69907d30fe1f80aea105184501750a58d15c81c + checksum: 10c0/4e0d7fc36d02c1b8c8b3006dfbfeedf7a367d3334a04934255de5128115ea0bafdeb3e5736a2559917f0653e4e437400d54542da0468e08d3cbc86d3bbfa8f30 languageName: node linkType: hard @@ -225,7 +225,7 @@ __metadata: "@babel/helper-validator-identifier": "npm:^7.22.20" peerDependencies: "@babel/core": ^7.0.0 - checksum: 583fa580f8e50e6f45c4f46aa76a8e49c2528deb84e25f634d66461b9a0e2420e13979b0a607b67aef67eaf8db8668eb9edc038b4514b16e3879fe09e8fd294b + checksum: 10c0/211e1399d0c4993671e8e5c2b25383f08bee40004ace5404ed4065f0e9258cc85d99c1b82fd456c030ce5cfd4d8f310355b54ef35de9924eabfc3dff1331d946 languageName: node linkType: hard @@ -234,14 +234,14 @@ __metadata: resolution: "@babel/helper-optimise-call-expression@npm:7.22.5" dependencies: "@babel/types": "npm:^7.22.5" - checksum: c70ef6cc6b6ed32eeeec4482127e8be5451d0e5282d5495d5d569d39eb04d7f1d66ec99b327f45d1d5842a9ad8c22d48567e93fc502003a47de78d122e355f7c + checksum: 10c0/31b41a764fc3c585196cf5b776b70cf4705c132e4ce9723f39871f215f2ddbfb2e28a62f9917610f67c8216c1080482b9b05f65dd195dae2a52cef461f2ac7b8 languageName: node linkType: hard "@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.18.6, 
@babel/helper-plugin-utils@npm:^7.22.5, @babel/helper-plugin-utils@npm:^7.8.0, @babel/helper-plugin-utils@npm:^7.8.3": version: 7.22.5 resolution: "@babel/helper-plugin-utils@npm:7.22.5" - checksum: ab220db218089a2aadd0582f5833fd17fa300245999f5f8784b10f5a75267c4e808592284a29438a0da365e702f05acb369f99e1c915c02f9f9210ec60eab8ea + checksum: 10c0/d2c4bfe2fa91058bcdee4f4e57a3f4933aed7af843acfd169cd6179fab8d13c1d636474ecabb2af107dc77462c7e893199aa26632bac1c6d7e025a17cbb9d20d languageName: node linkType: hard @@ -254,7 +254,7 @@ __metadata: "@babel/helper-wrap-function": "npm:^7.22.20" peerDependencies: "@babel/core": ^7.0.0 - checksum: 2fe6300a6f1b58211dffa0aed1b45d4958506d096543663dba83bd9251fe8d670fa909143a65b45e72acb49e7e20fbdb73eae315d9ddaced467948c3329986e7 + checksum: 10c0/aa93aa74250b636d477e8d863fbe59d4071f8c2654841b7ac608909e480c1cf3ff7d7af5a4038568829ad09d810bb681668cbe497d9c89ba5c352793dc9edf1e languageName: node linkType: hard @@ -267,7 +267,7 @@ __metadata: "@babel/helper-optimise-call-expression": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0 - checksum: 617666f57b0f94a2f430ee66b67c8f6fa94d4c22400f622947580d8f3638ea34b71280af59599ed4afbb54ae6e2bdd4f9083fe0e341184a4bb0bd26ef58d3017 + checksum: 10c0/6b0858811ad46873817c90c805015d63300e003c5a85c147a17d9845fa2558a02047c3cc1f07767af59014b2dd0fa75b503e5bc36e917f360e9b67bb6f1e79f4 languageName: node linkType: hard @@ -276,7 +276,7 @@ __metadata: resolution: "@babel/helper-simple-access@npm:7.22.5" dependencies: "@babel/types": "npm:^7.22.5" - checksum: 7d5430eecf880937c27d1aed14245003bd1c7383ae07d652b3932f450f60bfcf8f2c1270c593ab063add185108d26198c69d1aca0e6fb7c6fdada4bcf72ab5b7 + checksum: 10c0/f0cf81a30ba3d09a625fd50e5a9069e575c5b6719234e04ee74247057f8104beca89ed03e9217b6e9b0493434cedc18c5ecca4cea6244990836f1f893e140369 languageName: node linkType: hard @@ -285,7 +285,7 @@ __metadata: resolution: "@babel/helper-skip-transparent-expression-wrappers@npm:7.22.5" dependencies: "@babel/types": "npm:^7.22.5" - 
checksum: 1012ef2295eb12dc073f2b9edf3425661e9b8432a3387e62a8bc27c42963f1f216ab3124228015c748770b2257b4f1fda882ca8fa34c0bf485e929ae5bc45244 + checksum: 10c0/ab7fa2aa709ab49bb8cd86515a1e715a3108c4bb9a616965ba76b43dc346dee66d1004ccf4d222b596b6224e43e04cbc5c3a34459501b388451f8c589fbc3691 languageName: node linkType: hard @@ -294,28 +294,28 @@ __metadata: resolution: "@babel/helper-split-export-declaration@npm:7.22.6" dependencies: "@babel/types": "npm:^7.22.5" - checksum: e141cace583b19d9195f9c2b8e17a3ae913b7ee9b8120246d0f9ca349ca6f03cb2c001fd5ec57488c544347c0bb584afec66c936511e447fd20a360e591ac921 + checksum: 10c0/d83e4b623eaa9622c267d3c83583b72f3aac567dc393dda18e559d79187961cb29ae9c57b2664137fc3d19508370b12ec6a81d28af73a50e0846819cb21c6e44 languageName: node linkType: hard "@babel/helper-string-parser@npm:^7.23.4": version: 7.23.4 resolution: "@babel/helper-string-parser@npm:7.23.4" - checksum: c352082474a2ee1d2b812bd116a56b2e8b38065df9678a32a535f151ec6f58e54633cc778778374f10544b930703cca6ddf998803888a636afa27e2658068a9c + checksum: 10c0/f348d5637ad70b6b54b026d6544bd9040f78d24e7ec245a0fc42293968181f6ae9879c22d89744730d246ce8ec53588f716f102addd4df8bbc79b73ea10004ac languageName: node linkType: hard "@babel/helper-validator-identifier@npm:^7.22.20": version: 7.22.20 resolution: "@babel/helper-validator-identifier@npm:7.22.20" - checksum: df882d2675101df2d507b95b195ca2f86a3ef28cb711c84f37e79ca23178e13b9f0d8b522774211f51e40168bf5142be4c1c9776a150cddb61a0d5bf3e95750b + checksum: 10c0/dcad63db345fb110e032de46c3688384b0008a42a4845180ce7cd62b1a9c0507a1bed727c4d1060ed1a03ae57b4d918570259f81724aaac1a5b776056f37504e languageName: node linkType: hard "@babel/helper-validator-option@npm:^7.22.15, @babel/helper-validator-option@npm:^7.23.5": version: 7.23.5 resolution: "@babel/helper-validator-option@npm:7.23.5" - checksum: 537cde2330a8aede223552510e8a13e9c1c8798afee3757995a7d4acae564124fe2bf7e7c3d90d62d3657434a74340a274b3b3b1c6f17e9a2be1f48af29cb09e + checksum: 
10c0/af45d5c0defb292ba6fd38979e8f13d7da63f9623d8ab9ededc394f67eb45857d2601278d151ae9affb6e03d5d608485806cd45af08b4468a0515cf506510e94 languageName: node linkType: hard @@ -326,7 +326,7 @@ __metadata: "@babel/helper-function-name": "npm:^7.22.5" "@babel/template": "npm:^7.22.15" "@babel/types": "npm:^7.22.19" - checksum: b22e4666dec3d401bdf8ebd01d448bb3733617dae5aa6fbd1b684a22a35653cca832edd876529fd139577713b44fb89b4f5e52b7315ab218620f78b8a8ae23de + checksum: 10c0/97b5f42ff4d305318ff2f99a5f59d3e97feff478333b2d893c4f85456d3c66372070f71d7bf9141f598c8cf2741c49a15918193633c427a88d170d98eb8c46eb languageName: node linkType: hard @@ -337,7 +337,7 @@ __metadata: "@babel/template": "npm:^7.23.9" "@babel/traverse": "npm:^7.23.9" "@babel/types": "npm:^7.23.9" - checksum: dd56daac8bbd7ed174bb00fd185926fd449e591d9a00edaceb7ac6edbdd7a8db57e2cb365b4fafda382201752789ced2f7ae010f667eab0f198a4571cda4d2c5 + checksum: 10c0/f69fd0aca96a6fb8bd6dd044cd8a5c0f1851072d4ce23355345b9493c4032e76d1217f86b70df795e127553cf7f3fcd1587ede9d1b03b95e8b62681ca2165b87 languageName: node linkType: hard @@ -348,7 +348,7 @@ __metadata: "@babel/helper-validator-identifier": "npm:^7.22.20" chalk: "npm:^2.4.2" js-tokens: "npm:^4.0.0" - checksum: 62fef9b5bcea7131df4626d009029b1ae85332042f4648a4ce6e740c3fd23112603c740c45575caec62f260c96b11054d3be5987f4981a5479793579c3aac71f + checksum: 10c0/fbff9fcb2f5539289c3c097d130e852afd10d89a3a08ac0b5ebebbc055cc84a4bcc3dcfed463d488cde12dd0902ef1858279e31d7349b2e8cee43913744bda33 languageName: node linkType: hard @@ -357,7 +357,7 @@ __metadata: resolution: "@babel/parser@npm:7.23.9" bin: parser: ./bin/babel-parser.js - checksum: 727a7a807100f6a26df859e2f009c4ddbd0d3363287b45daa50bd082ccd0d431d0c4d0e610a91f806e04a1918726cd0f5a0592c9b902a815337feed12e1cafd9 + checksum: 10c0/7df97386431366d4810538db4b9ec538f4377096f720c0591c7587a16f6810e62747e9fbbfa1ff99257fd4330035e4fb1b5b77c7bd3b97ce0d2e3780a6618975 languageName: node linkType: hard @@ -368,7 +368,7 @@ __metadata: 
"@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0 - checksum: ddbaf2c396b7780f15e80ee01d6dd790db076985f3dfeb6527d1a8d4cacf370e49250396a3aa005b2c40233cac214a106232f83703d5e8491848bde273938232 + checksum: 10c0/356a4e9fc52d7ca761ce6857fc58e2295c2785d22565760e6a5680be86c6e5883ab86e0ba25ef572882c01713d3a31ae6cfa3e3222cdb95e6026671dab1fa415 languageName: node linkType: hard @@ -381,7 +381,7 @@ __metadata: "@babel/plugin-transform-optional-chaining": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.13.0 - checksum: 434b9d710ae856fa1a456678cc304fbc93915af86d581ee316e077af746a709a741ea39d7e1d4f5b98861b629cc7e87f002d3138f5e836775632466d4c74aef2 + checksum: 10c0/a8785f099d55ca71ed89815e0f3a636a80c16031f80934cfec17c928d096ee0798964733320c8b145ef36ba429c5e19d5107b06231e0ab6777cfb0f01adfdc23 languageName: node linkType: hard @@ -393,7 +393,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0 - checksum: 3b0c9554cd0048e6e7341d7b92f29d400dbc6a5a4fc2f86dbed881d32e02ece9b55bc520387bae2eac22a5ab38a0b205c29b52b181294d99b4dd75e27309b548 + checksum: 10c0/355746e21ad7f43e4f4daef54cfe2ef461ecd19446b2afedd53c39df1bf9aa2eeeeaabee2279b1321de89a97c9360e4f76e9ba950fee50ff1676c25f6929d625 languageName: node linkType: hard @@ -402,7 +402,7 @@ __metadata: resolution: "@babel/plugin-proposal-private-property-in-object@npm:7.21.0-placeholder-for-preset-env.2" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: fab70f399aa869275690ec6c7cedb4ef361d4e8b6f55c3d7b04bfee61d52fb93c87cec2c65d73cddbaca89fb8ef5ec0921fce675c9169d9d51f18305ab34e78a + checksum: 10c0/e605e0070da087f6c35579499e65801179a521b6842c15181a1e305c04fded2393f11c1efd09b087be7f8b083d1b75e8f3efcbc1292b4f60d3369e14812cff63 languageName: node linkType: hard @@ -413,7 +413,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 
7ed1c1d9b9e5b64ef028ea5e755c0be2d4e5e4e3d6cf7df757b9a8c4cfa4193d268176d0f1f7fbecdda6fe722885c7fda681f480f3741d8a2d26854736f05367 + checksum: 10c0/d13efb282838481348c71073b6be6245b35d4f2f964a8f71e4174f235009f929ef7613df25f8d2338e2d3e44bc4265a9f8638c6aaa136d7a61fe95985f9725c8 languageName: node linkType: hard @@ -424,7 +424,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 3a10849d83e47aec50f367a9e56a6b22d662ddce643334b087f9828f4c3dd73bdc5909aaeabe123fed78515767f9ca43498a0e621c438d1cd2802d7fae3c9648 + checksum: 10c0/686891b81af2bc74c39013655da368a480f17dd237bf9fbc32048e5865cb706d5a8f65438030da535b332b1d6b22feba336da8fa931f663b6b34e13147d12dde languageName: node linkType: hard @@ -435,7 +435,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.12.13" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 24f34b196d6342f28d4bad303612d7ff566ab0a013ce89e775d98d6f832969462e7235f3e7eaf17678a533d4be0ba45d3ae34ab4e5a9dcbda5d98d49e5efa2fc + checksum: 10c0/95168fa186416195280b1264fb18afcdcdcea780b3515537b766cb90de6ce042d42dd6a204a39002f794ae5845b02afb0fd4861a3308a861204a55e68310a120 languageName: node linkType: hard @@ -446,7 +446,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.14.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 3e80814b5b6d4fe17826093918680a351c2d34398a914ce6e55d8083d72a9bdde4fbaf6a2dcea0e23a03de26dc2917ae3efd603d27099e2b98380345703bf948 + checksum: 10c0/4464bf9115f4a2d02ce1454411baf9cfb665af1da53709c5c56953e5e2913745b0fcce82982a00463d6facbdd93445c691024e310b91431a1e2f024b158f6371 languageName: node linkType: hard @@ -457,7 +457,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: ce307af83cf433d4ec42932329fad25fa73138ab39c7436882ea28742e1c0066626d224e0ad2988724c82644e41601cef607b36194f695cb78a1fcdc959637bd + checksum: 
10c0/9c50927bf71adf63f60c75370e2335879402648f468d0172bc912e303c6a3876927d8eb35807331b57f415392732ed05ab9b42c68ac30a936813ab549e0246c5 languageName: node linkType: hard @@ -468,7 +468,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 85740478be5b0de185228e7814451d74ab8ce0a26fcca7613955262a26e99e8e15e9da58f60c754b84515d4c679b590dbd3f2148f0f58025f4ae706f1c5a5d4a + checksum: 10c0/5100d658ba563829700cd8d001ddc09f4c0187b1a13de300d729c5b3e87503f75a6d6c99c1794182f7f1a9f546ee009df4f15a0ce36376e206ed0012fa7cdc24 languageName: node linkType: hard @@ -479,7 +479,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c6e6f355d6ace5f4a9e7bb19f1fed2398aeb9b62c4c671a189d81b124f9f5bb77c4225b6e85e19339268c60a021c1e49104e450375de5e6bb70612190d9678af + checksum: 10c0/8a5e1e8b6a3728a2c8fe6d70c09a43642e737d9c0485e1b041cd3a6021ef05376ec3c9137be3b118c622ba09b5770d26fdc525473f8d06d4ab9e46de2783dd0a languageName: node linkType: hard @@ -490,7 +490,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 883e6b35b2da205138caab832d54505271a3fee3fc1e8dc0894502434fc2b5d517cbe93bbfbfef8068a0fb6ec48ebc9eef3f605200a489065ba43d8cddc1c9a7 + checksum: 10c0/7db8b59f75667bada2293353bb66b9d5651a673b22c72f47da9f5c46e719142481601b745f9822212fd7522f92e26e8576af37116f85dae1b5e5967f80d0faab languageName: node linkType: hard @@ -501,7 +501,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 9aed7661ffb920ca75df9f494757466ca92744e43072e0848d87fa4aa61a3f2ee5a22198ac1959856c036434b5614a8f46f1fb70298835dbe28220cdd1d4c11e + checksum: 10c0/99b40d33d79205a8e04bb5dea56fd72906ffc317513b20ca7319e7683e18fce8ea2eea5e9171056f92b979dc0ab1e31b2cb5171177a5ba61e05b54fe7850a606 languageName: node linkType: hard @@ -512,7 +512,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.10.4" 
peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 166ac1125d10b9c0c430e4156249a13858c0366d38844883d75d27389621ebe651115cb2ceb6dc011534d5055719fa1727b59f39e1ab3ca97820eef3dcab5b9b + checksum: 10c0/0b08b5e4c3128523d8e346f8cfc86824f0da2697b1be12d71af50a31aff7a56ceb873ed28779121051475010c28d6146a6bfea8518b150b71eeb4e46190172ee languageName: node linkType: hard @@ -523,7 +523,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: bf5aea1f3188c9a507e16efe030efb996853ca3cadd6512c51db7233cc58f3ac89ff8c6bdfb01d30843b161cfe7d321e1bf28da82f7ab8d7e6bc5464666f354a + checksum: 10c0/e98f31b2ec406c57757d115aac81d0336e8434101c224edd9a5c93cefa53faf63eacc69f3138960c8b25401315af03df37f68d316c151c4b933136716ed6906e languageName: node linkType: hard @@ -534,7 +534,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 89037694314a74e7f0e7a9c8d3793af5bf6b23d80950c29b360db1c66859d67f60711ea437e70ad6b5b4b29affe17eababda841b6c01107c2b638e0493bafb4e + checksum: 10c0/563bb7599b868773f1c7c1d441ecc9bc53aeb7832775da36752c926fc402a1fa5421505b39e724f71eb217c13e4b93117e081cac39723b0e11dac4c897f33c3e languageName: node linkType: hard @@ -545,7 +545,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.10.4" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: aff33577037e34e515911255cdbb1fd39efee33658aa00b8a5fd3a4b903585112d037cce1cc9e4632f0487dc554486106b79ccd5ea63a2e00df4363f6d4ff886 + checksum: 10c0/2594cfbe29411ad5bc2ad4058de7b2f6a8c5b86eda525a993959438615479e59c012c14aec979e538d60a584a1a799b60d1b8942c3b18468cb9d99b8fd34cd0b languageName: node linkType: hard @@ -556,7 +556,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 87aca4918916020d1fedba54c0e232de408df2644a425d153be368313fdde40d96088feed6c4e5ab72aac89be5d07fef2ddf329a15109c5eb65df006bf2580d1 + checksum: 
10c0/2024fbb1162899094cfc81152449b12bd0cc7053c6d4bda8ac2852545c87d0a851b1b72ed9560673cbf3ef6248257262c3c04aabf73117215c1b9cc7dd2542ce languageName: node linkType: hard @@ -567,7 +567,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.10.4" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 01ec5547bd0497f76cc903ff4d6b02abc8c05f301c88d2622b6d834e33a5651aa7c7a3d80d8d57656a4588f7276eba357f6b7e006482f5b564b7a6488de493a1 + checksum: 10c0/c55a82b3113480942c6aa2fcbe976ff9caa74b7b1109ff4369641dfbc88d1da348aceb3c31b6ed311c84d1e7c479440b961906c735d0ab494f688bf2fd5b9bb9 languageName: node linkType: hard @@ -578,7 +578,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: fddcf581a57f77e80eb6b981b10658421bc321ba5f0a5b754118c6a92a5448f12a0c336f77b8abf734841e102e5126d69110a306eadb03ca3e1547cab31f5cbf + checksum: 10c0/ee1eab52ea6437e3101a0a7018b0da698545230015fc8ab129d292980ec6dff94d265e9e90070e8ae5fed42f08f1622c14c94552c77bcac784b37f503a82ff26 languageName: node linkType: hard @@ -589,7 +589,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 910d90e72bc90ea1ce698e89c1027fed8845212d5ab588e35ef91f13b93143845f94e2539d831dc8d8ededc14ec02f04f7bd6a8179edd43a326c784e7ed7f0b9 + checksum: 10c0/27e2493ab67a8ea6d693af1287f7e9acec206d1213ff107a928e85e173741e1d594196f99fec50e9dde404b09164f39dec5864c767212154ffe1caa6af0bc5af languageName: node linkType: hard @@ -600,7 +600,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.8.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: eef94d53a1453361553c1f98b68d17782861a04a392840341bc91780838dd4e695209c783631cf0de14c635758beafb6a3a65399846ffa4386bff90639347f30 + checksum: 10c0/46edddf2faa6ebf94147b8e8540dfc60a5ab718e2de4d01b2c0bdf250a4d642c2bd47cbcbb739febcb2bf75514dbcefad3c52208787994b8d0f8822490f55e81 languageName: node linkType: hard @@ -611,7 +611,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.14.5" 
peerDependencies: "@babel/core": ^7.0.0-0 - checksum: b317174783e6e96029b743ccff2a67d63d38756876e7e5d0ba53a322e38d9ca452c13354a57de1ad476b4c066dbae699e0ca157441da611117a47af88985ecda + checksum: 10c0/69822772561706c87f0a65bc92d0772cea74d6bc0911537904a676d5ff496a6d3ac4e05a166d8125fce4a16605bace141afc3611074e170a994e66e5397787f3 languageName: node linkType: hard @@ -622,7 +622,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.14.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: bbd1a56b095be7820029b209677b194db9b1d26691fe999856462e66b25b281f031f3dfd91b1619e9dcf95bebe336211833b854d0fb8780d618e35667c2d0d7e + checksum: 10c0/14bf6e65d5bc1231ffa9def5f0ef30b19b51c218fcecaa78cd1bdf7939dfdf23f90336080b7f5196916368e399934ce5d581492d8292b46a2fb569d8b2da106f languageName: node linkType: hard @@ -633,7 +633,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: abfad3a19290d258b028e285a1f34c9b8a0cbe46ef79eafed4ed7ffce11b5d0720b5e536c82f91cbd8442cde35a3dd8e861fa70366d87ff06fdc0d4756e30876 + checksum: 10c0/4d6e9cdb9d0bfb9bd9b220fc951d937fce2ca69135ec121153572cebe81d86abc9a489208d6b69ee5f10cadcaeffa10d0425340a5029e40e14a6025021b90948 languageName: node linkType: hard @@ -645,7 +645,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.18.6" peerDependencies: "@babel/core": ^7.0.0 - checksum: a651d700fe63ff0ddfd7186f4ebc24447ca734f114433139e3c027bc94a900d013cf1ef2e2db8430425ba542e39ae160c3b05f06b59fd4656273a3df97679e9c + checksum: 10c0/9144e5b02a211a4fb9a0ce91063f94fbe1004e80bde3485a0910c9f14897cf83fabd8c21267907cff25db8e224858178df0517f14333cfcf3380ad9a4139cb50 languageName: node linkType: hard @@ -656,7 +656,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 1e99118176e5366c2636064d09477016ab5272b2a92e78b8edb571d20bc3eaa881789a905b20042942c3c2d04efc530726cf703f937226db5ebc495f5d067e66 + checksum: 
10c0/b128315c058f5728d29b0b78723659b11de88247ea4d0388f0b935cddf60a80c40b9067acf45cbbe055bd796928faef152a09d9e4a0695465aca4394d9f109ca languageName: node linkType: hard @@ -670,7 +670,7 @@ __metadata: "@babel/plugin-syntax-async-generators": "npm:^7.8.4" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: d402494087a6b803803eb5ab46b837aab100a04c4c5148e38bfa943ea1bbfc1ecfb340f1ced68972564312d3580f550c125f452372e77607a558fbbaf98c31c0 + checksum: 10c0/4ff75f9ce500e1de8c0236fa5122e6475a477d19cb9a4c2ae8651e78e717ebb2e2cecfeca69d420def779deaec78b945843b9ffd15f02ecd7de5072030b4469b languageName: node linkType: hard @@ -683,7 +683,7 @@ __metadata: "@babel/helper-remap-async-to-generator": "npm:^7.22.20" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2e9d9795d4b3b3d8090332104e37061c677f29a1ce65bcbda4099a32d243e5d9520270a44bbabf0fb1fb40d463bd937685b1a1042e646979086c546d55319c3c + checksum: 10c0/da3ffd413eef02a8e2cfee3e0bb0d5fc0fcb795c187bc14a5a8e8874cdbdc43bbf00089c587412d7752d97efc5967c3c18ff5398e3017b9a14a06126f017e7e9 languageName: node linkType: hard @@ -694,7 +694,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e63b16d94ee5f4d917e669da3db5ea53d1e7e79141a2ec873c1e644678cdafe98daa556d0d359963c827863d6b3665d23d4938a94a4c5053a1619c4ebd01d020 + checksum: 10c0/82c12a11277528184a979163de7189ceb00129f60dd930b0d5313454310bf71205f302fb2bf0430247161c8a22aaa9fb9eec1459f9f7468206422c191978fd59 languageName: node linkType: hard @@ -705,7 +705,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: bbb965a3acdfb03559806d149efbd194ac9c983b260581a60efcb15eb9fbe20e3054667970800146d867446db1c1398f8e4ee87f4454233e49b8f8ce947bd99b + checksum: 10c0/83006804dddf980ab1bcd6d67bc381e24b58c776507c34f990468f820d0da71dba3697355ca4856532fa2eeb2a1e3e73c780f03760b5507a511cbedb0308e276 languageName: node linkType: hard @@ -717,7 +717,7 @@ __metadata: 
"@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 9c6f8366f667897541d360246de176dd29efc7a13d80a5b48361882f7173d9173be4646c3b7d9b003ccc0e01e25df122330308f33db921fa553aa17ad544b3fc + checksum: 10c0/bca30d576f539eef216494b56d610f1a64aa9375de4134bc021d9660f1fa735b1d7cc413029f22abc0b7cb737e3a57935c8ae9d8bd1730921ccb1deebce51bfd languageName: node linkType: hard @@ -730,7 +730,7 @@ __metadata: "@babel/plugin-syntax-class-static-block": "npm:^7.14.5" peerDependencies: "@babel/core": ^7.12.0 - checksum: c8bfaba19a674fc2eb54edad71e958647360474e3163e8226f1acd63e4e2dbec32a171a0af596c1dc5359aee402cc120fea7abd1fb0e0354b6527f0fc9e8aa1e + checksum: 10c0/fdca96640ef29d8641a7f8de106f65f18871b38cc01c0f7b696d2b49c76b77816b30a812c08e759d06dd10b4d9b3af6b5e4ac22a2017a88c4077972224b77ab0 languageName: node linkType: hard @@ -748,7 +748,7 @@ __metadata: globals: "npm:^11.1.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 4bb4b19e7a39871c4414fb44fc5f2cc47c78f993b74c43238dfb99c9dac2d15cb99b43f8a3d42747580e1807d2b8f5e13ce7e95e593fd839bd176aa090bf9a23 + checksum: 10c0/227ac5166501e04d9e7fbd5eda6869b084ffa4af6830ac12544ac6ea14953ca00eb1762b0df9349c0f6c8d2a799385910f558066cd0fb85b9ca437b1131a6043 languageName: node linkType: hard @@ -760,7 +760,7 @@ __metadata: "@babel/template": "npm:^7.22.15" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e75593e02c5ea473c17839e3c9d597ce3697bf039b66afe9a4d06d086a87fb3d95850b4174476897afc351dc1b46a9ec3165ee6e8fbad3732c0d65f676f855ad + checksum: 10c0/3ca8a006f8e652b58c21ecb84df1d01a73f0a96b1d216fd09a890b235dd90cb966b152b603b88f7e850ae238644b1636ce5c30b7c029c0934b43383932372e4a languageName: node linkType: hard @@ -771,7 +771,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 5abd93718af5a61f8f6a97d2ccac9139499752dd5b2c533d7556fb02947ae01b2f51d4c4f5e64df569e8783d3743270018eb1fa979c43edec7dd1377acf107ed + checksum: 
10c0/717e9a62c1b0c93c507f87b4eaf839ec08d3c3147f14d74ae240d8749488d9762a8b3950132be620a069bde70f4b3e4ee9867b226c973fcc40f3cdec975cde71 languageName: node linkType: hard @@ -783,7 +783,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a2dbbf7f1ea16a97948c37df925cb364337668c41a3948b8d91453f140507bd8a3429030c7ce66d09c299987b27746c19a2dd18b6f17dcb474854b14fd9159a3 + checksum: 10c0/6c89286d1277c2a63802a453c797c87c1203f89e4c25115f7b6620f5fce15d8c8d37af613222f6aa497aa98773577a6ec8752e79e13d59bc5429270677ea010b languageName: node linkType: hard @@ -794,7 +794,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c2a21c34dc0839590cd945192cbc46fde541a27e140c48fe1808315934664cdbf18db64889e23c4eeb6bad9d3e049482efdca91d29de5734ffc887c4fbabaa16 + checksum: 10c0/7e2640e4e6adccd5e7b0615b6e9239d7c98363e21c52086ea13759dfa11cf7159b255fc5331c2de435639ea8eb6acefae115ae0d797a3d19d12587652f8052a5 languageName: node linkType: hard @@ -806,7 +806,7 @@ __metadata: "@babel/plugin-syntax-dynamic-import": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 57a722604c430d9f3dacff22001a5f31250e34785d4969527a2ae9160fa86858d0892c5b9ff7a06a04076f8c76c9e6862e0541aadca9c057849961343aab0845 + checksum: 10c0/19ae4a4a2ca86d35224734c41c48b2aa6a13139f3cfa1cbd18c0e65e461de8b65687dec7e52b7a72bb49db04465394c776aa1b13a2af5dc975b2a0cde3dcab67 languageName: node linkType: hard @@ -818,7 +818,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 00d05ab14ad0f299160fcf9d8f55a1cc1b740e012ab0b5ce30207d2365f091665115557af7d989cd6260d075a252d9e4283de5f2b247dfbbe0e42ae586e6bf66 + checksum: 10c0/5c33ee6a1bdc52fcdf0807f445b27e3fbdce33008531885e65a699762327565fffbcfde8395be7f21bcb22d582e425eddae45650c986462bb84ba68f43687516 languageName: node linkType: hard @@ -830,7 +830,7 @@ __metadata: 
"@babel/plugin-syntax-export-namespace-from": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 9f770a81bfd03b48d6ba155d452946fd56d6ffe5b7d871e9ec2a0b15e0f424273b632f3ed61838b90015b25bbda988896b7a46c7d964fbf8f6feb5820b309f93 + checksum: 10c0/38bf04f851e36240bbe83ace4169da626524f4107bfb91f05b4ad93a5fb6a36d5b3d30b8883c1ba575ccfc1bac7938e90ca2e3cb227f7b3f4a9424beec6fd4a7 languageName: node linkType: hard @@ -842,7 +842,7 @@ __metadata: "@babel/plugin-syntax-flow": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 84af4b1f6d79f1a66a2440c5cfe3ba0e2bb9355402da477add13de1867088efb8d7b2be15d67ac955f1d2a745d4a561423bbb473fe6e4622b157989598ec323f + checksum: 10c0/9ab627f9668fc1f95564b26bffd6706f86205960d9ccc168236752fbef65dbe10aa0ce74faae12f48bb3b72ec7f38ef2a78b4874c222c1e85754e981639f3b33 languageName: node linkType: hard @@ -854,7 +854,7 @@ __metadata: "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: b84ef1f26a2db316237ae6d10fa7c22c70ac808ed0b8e095a8ecf9101551636cbb026bee9fb95a0a7944f3b8278ff9636a9088cb4a4ac5b84830a13829242735 + checksum: 10c0/46681b6ab10f3ca2d961f50d4096b62ab5d551e1adad84e64be1ee23e72eb2f26a1e30e617e853c74f1349fffe4af68d33921a128543b6f24b6d46c09a3e2aec languageName: node linkType: hard @@ -867,7 +867,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 355c6dbe07c919575ad42b2f7e020f320866d72f8b79181a16f8e0cd424a2c761d979f03f47d583d9471b55dcd68a8a9d829b58e1eebcd572145b934b48975a6 + checksum: 10c0/89cb9747802118048115cf92a8f310752f02030549b26f008904990cbdc86c3d4a68e07ca3b5c46de8a46ed4df2cb576ac222c74c56de67253d2a3ddc2956083 languageName: node linkType: hard @@ -879,7 +879,7 @@ __metadata: "@babel/plugin-syntax-json-strings": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 
f9019820233cf8955d8ba346df709a0683c120fe86a24ed1c9f003f2db51197b979efc88f010d558a12e1491210fc195a43cd1c7fee5e23b92da38f793a875de + checksum: 10c0/39e82223992a9ad857722ae051291935403852ad24b0dd64c645ca1c10517b6bf9822377d88643fed8b3e61a4e3f7e5ae41cf90eb07c40a786505d47d5970e54 languageName: node linkType: hard @@ -890,7 +890,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 519a544cd58586b9001c4c9b18da25a62f17d23c48600ff7a685d75ca9eb18d2c5e8f5476f067f0a8f1fea2a31107eff950b9864833061e6076dcc4bdc3e71ed + checksum: 10c0/8292106b106201464c2bfdd5c014fe6a9ca1c0256eb0a8031deb20081e21906fe68b156186f77d993c23eeab6d8d6f5f66e8895eec7ed97ce6de5dbcafbcd7f4 languageName: node linkType: hard @@ -902,7 +902,7 @@ __metadata: "@babel/plugin-syntax-logical-assignment-operators": "npm:^7.10.4" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2ae1dc9b4ff3bf61a990ff3accdecb2afe3a0ca649b3e74c010078d1cdf29ea490f50ac0a905306a2bcf9ac177889a39ac79bdcc3a0fdf220b3b75fac18d39b5 + checksum: 10c0/87b034dd13143904e405887e6125d76c27902563486efc66b7d9a9d8f9406b76c6ac42d7b37224014af5783d7edb465db0cdecd659fa3227baad0b3a6a35deff languageName: node linkType: hard @@ -913,7 +913,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 95cec13c36d447c5aa6b8e4c778b897eeba66dcb675edef01e0d2afcec9e8cb9726baf4f81b4bbae7a782595aed72e6a0d44ffb773272c3ca180fada99bf92db + checksum: 10c0/687f24f3ec60b627fef6e87b9e2770df77f76727b9d5f54fa4c84a495bb24eb4a20f1a6240fa22d339d45aac5eaeb1b39882e941bfd00cf498f9c53478d1ec88 languageName: node linkType: hard @@ -925,7 +925,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 48c87dee2c7dae8ed40d16901f32c9e58be4ef87bf2c3985b51dd2e78e82081f3bad0a39ee5cf6e8909e13e954e2b4bedef0a8141922f281ed833ddb59ed9be2 + checksum: 
10c0/9f7ec036f7cfc588833a4dd117a44813b64aa4c1fd5bfb6c78f60198c1d290938213090c93a46f97a68a2490fad909e21a82b2472e95da74d108c125df21c8d5 languageName: node linkType: hard @@ -938,7 +938,7 @@ __metadata: "@babel/helper-simple-access": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a3bc082d0dfe8327a29263a6d721cea608d440bc8141ba3ec6ba80ad73d84e4f9bbe903c27e9291c29878feec9b5dee2bd0563822f93dc951f5d7fc36bdfe85b + checksum: 10c0/5c8840c5c9ecba39367ae17c973ed13dbc43234147b77ae780eec65010e2a9993c5d717721b23e8179f7cf49decdd325c509b241d69cfbf92aa647a1d8d5a37d languageName: node linkType: hard @@ -952,7 +952,7 @@ __metadata: "@babel/helper-validator-identifier": "npm:^7.22.20" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 4bb800e5a9d0d668d7421ae3672fccff7d5f2a36621fd87414d7ece6d6f4d93627f9644cfecacae934bc65ffc131c8374242aaa400cca874dcab9b281a21aff0 + checksum: 10c0/1926631fe9d87c0c53427a3420ad49da62d53320d0016b6afab64e5417a672aa5bdff3ea1d24746ffa1e43319c28a80f5d8cef0ad214760d399c293b5850500f languageName: node linkType: hard @@ -964,7 +964,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e3f3af83562d687899555c7826b3faf0ab93ee7976898995b1d20cbe7f4451c55e05b0e17bfb3e549937cbe7573daf5400b752912a241b0a8a64d2457c7626e5 + checksum: 10c0/f0d2f890a15b4367d0d8f160bed7062bdb145c728c24e9bfbc1211c7925aae5df72a88df3832c92dd2011927edfed4da1b1249e4c78402e893509316c0c2caa6 languageName: node linkType: hard @@ -976,7 +976,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0 - checksum: 3ee564ddee620c035b928fdc942c5d17e9c4b98329b76f9cefac65c111135d925eb94ed324064cd7556d4f5123beec79abea1d4b97d1c8a2a5c748887a2eb623 + checksum: 10c0/b0b072bef303670b5a98307bc37d1ac326cb7ad40ea162b89a03c2ffc465451be7ef05be95cb81ed28bfeb29670dc98fe911f793a67bceab18b4cb4c81ef48f3 languageName: node linkType: hard @@ -987,7 +987,7 @@ __metadata: "@babel/helper-plugin-utils": 
"npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e5053389316fce73ad5201b7777437164f333e24787fbcda4ae489cd2580dbbbdfb5694a7237bad91fabb46b591d771975d69beb1c740b82cb4761625379f00b + checksum: 10c0/f489b9e1f17b42b2ba6312d58351e757cb23a8409f64f2bb6af4c09d015359588a5d68943b20756f141d0931a94431c782f3ed1225228a930a04b07be0c31b04 languageName: node linkType: hard @@ -999,7 +999,7 @@ __metadata: "@babel/plugin-syntax-nullish-coalescing-operator": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a27d73ea134d3d9560a6b2e26ab60012fba15f1db95865aa0153c18f5ec82cfef6a7b3d8df74e3c2fca81534fa5efeb6cacaf7b08bdb7d123e3dafdd079886a3 + checksum: 10c0/bce490d22da5c87ff27fffaff6ad5a4d4979b8d7b72e30857f191e9c1e1824ba73bb8d7081166289369e388f94f0ce5383a593b1fc84d09464a062c75f824b0b languageName: node linkType: hard @@ -1011,7 +1011,7 @@ __metadata: "@babel/plugin-syntax-numeric-separator": "npm:^7.10.4" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 6ba0e5db3c620a3ec81f9e94507c821f483c15f196868df13fa454cbac719a5449baf73840f5b6eb7d77311b24a2cf8e45db53700d41727f693d46f7caf3eec3 + checksum: 10c0/e34902da4f5588dc4812c92cb1f6a5e3e3647baf7b4623e30942f551bf1297621abec4e322ebfa50b320c987c0f34d9eb4355b3d289961d9035e2126e3119c12 languageName: node linkType: hard @@ -1026,7 +1026,7 @@ __metadata: "@babel/plugin-transform-parameters": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 656f09c4ec629856e807d5b386559166ae417ff75943abce19656b2c6de5101dfd0aaf23f9074e854339370b4e09f57518d3202457046ee5b567ded531005479 + checksum: 10c0/b56017992ffe7fcd1dd9a9da67c39995a141820316266bcf7d77dc912980d228ccbd3f36191d234f5cc389b09157b5d2a955e33e8fb368319534affd1c72b262 languageName: node linkType: hard @@ -1038,7 +1038,7 @@ __metadata: "@babel/helper-replace-supers": "npm:^7.22.20" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 
e495497186f621fa79026e183b4f1fbb172fd9df812cbd2d7f02c05b08adbe58012b1a6eb6dd58d11a30343f6ec80d0f4074f9b501d70aa1c94df76d59164c53 + checksum: 10c0/a6856fd8c0afbe5b3318c344d4d201d009f4051e2f6ff6237ff2660593e93c5997a58772b13d639077c3e29ced3440247b29c496cd77b13af1e7559a70009775 languageName: node linkType: hard @@ -1050,7 +1050,7 @@ __metadata: "@babel/plugin-syntax-optional-catch-binding": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: d50b5ee142cdb088d8b5de1ccf7cea85b18b85d85b52f86618f6e45226372f01ad4cdb29abd4fd35ea99a71fefb37009e0107db7a787dcc21d4d402f97470faf + checksum: 10c0/4ef61812af0e4928485e28301226ce61139a8b8cea9e9a919215ebec4891b9fea2eb7a83dc3090e2679b7d7b2c8653da601fbc297d2addc54a908b315173991e languageName: node linkType: hard @@ -1063,7 +1063,7 @@ __metadata: "@babel/plugin-syntax-optional-chaining": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 0ef24e889d6151428953fc443af5f71f4dae73f373dc1b7f5dd3f6a61d511296eb77e9b870e8c2c02a933e3455ae24c1fa91738c826b72a4ff87e0337db527e8 + checksum: 10c0/305b773c29ad61255b0e83ec1e92b2f7af6aa58be4cba1e3852bddaa14f7d2afd7b4438f41c28b179d6faac7eb8d4fb5530a17920294f25d459b8f84406bfbfb languageName: node linkType: hard @@ -1074,7 +1074,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a8c36c3fc25f9daa46c4f6db47ea809c395dc4abc7f01c4b1391f6e5b0cd62b83b6016728b02a6a8ac21aca56207c9ec66daefc0336e9340976978de7e6e28df + checksum: 10c0/a8d4cbe0f6ba68d158f5b4215c63004fc37a1fdc539036eb388a9792017c8496ea970a1932ccb929308f61e53dc56676ed01d8df6f42bc0a85c7fd5ba82482b7 languageName: node linkType: hard @@ -1086,7 +1086,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: cedc1285c49b5a6d9a3d0e5e413b756ac40b3ac2f8f68bdfc3ae268bc8d27b00abd8bb0861c72756ff5dd8bf1eb77211b7feb5baf4fdae2ebbaabe49b9adc1d0 + checksum: 
10c0/745a655edcd111b7f91882b921671ca0613079760d8c9befe336b8a9bc4ce6bb49c0c08941831c950afb1b225b4b2d3eaac8842e732db095b04db38efd8c34f4 languageName: node linkType: hard @@ -1100,7 +1100,7 @@ __metadata: "@babel/plugin-syntax-private-property-in-object": "npm:^7.14.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 02eef2ee98fa86ee5052ed9bf0742d6d22b510b5df2fcce0b0f5615d6001f7786c6b31505e7f1c2f446406d8fb33603a5316d957cfa5b8365cbf78ddcc24fa42 + checksum: 10c0/8d31b28f24204b4d13514cd3a8f3033abf575b1a6039759ddd6e1d82dd33ba7281f9bc85c9f38072a665d69bfa26dc40737eefaf9d397b024654a483d2357bf5 languageName: node linkType: hard @@ -1111,7 +1111,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 16b048c8e87f25095f6d53634ab7912992f78e6997a6ff549edc3cf519db4fca01c7b4e0798530d7f6a05228ceee479251245cdd850a5531c6e6f404104d6cc9 + checksum: 10c0/b2549f23f90cf276c2e3058c2225c3711c2ad1c417e336d3391199445a9776dd791b83be47b2b9a7ae374b40652d74b822387e31fa5267a37bf49c122e1a9747 languageName: node linkType: hard @@ -1122,7 +1122,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 7f86964e8434d3ddbd3c81d2690c9b66dbf1cd8bd9512e2e24500e9fa8cf378bc52c0853270b3b82143aba5965aec04721df7abdb768f952b44f5c6e0b198779 + checksum: 10c0/3aed142af7bd1aed1df2bdad91ed33ba1cdd5c3c67ce6eafba821ff72f129162a197ffb55f1eb1775af276abd5545934489a8257fef6c6665ddf253a4f39a939 languageName: node linkType: hard @@ -1133,7 +1133,7 @@ __metadata: "@babel/plugin-transform-react-jsx": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 36bc3ff0b96bb0ef4723070a50cfdf2e72cfd903a59eba448f9fe92fea47574d6f22efd99364413719e1f3fb3c51b6c9b2990b87af088f8486a84b2a5f9e4560 + checksum: 10c0/4d2e9e68383238feb873f6111df972df4a2ebf6256d6f787a8772241867efa975b3980f7d75ab7d750e7eaad4bd454e8cc6e106301fd7572dd389e553f5f69d2 languageName: node linkType: hard @@ -1144,7 +1144,7 @@ __metadata: 
"@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 882bf56bc932d015c2d83214133939ddcf342e5bcafa21f1a93b19f2e052145115e1e0351730897fd66e5f67cad7875b8a8d81ceb12b6e2a886ad0102cb4eb1f + checksum: 10c0/6b586508fc58998483d4ee93a7e784c4f4d2350e2633739cf1990b7ad172e13906f72382fdaf7f07b4e3c7e7555342634d392bdeb1a079bb64762c6368ca9a32 languageName: node linkType: hard @@ -1155,7 +1155,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 92287fb797e522d99bdc77eaa573ce79ff0ad9f1cf4e7df374645e28e51dce0adad129f6f075430b129b5bac8dad843f65021970e12e992d6d6671f0d65bb1e0 + checksum: 10c0/a3aad7cf738e9bfaddc26cdbb83bb9684c2e689d26fb0793d772af0c8da0cd25bb02523d192fbc6946c32143e56b472c1d33fa82466b3f2d3346e1ce8fe83cf6 languageName: node linkType: hard @@ -1170,7 +1170,7 @@ __metadata: "@babel/types": "npm:^7.23.4" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: d83806701349addfb77b8347b4f0dc8e76fb1c9ac21bdef69f4002394fce2396d61facfc6e1a3de54cbabcdadf991a1f642e69edb5116ac14f95e33d9f7c221d + checksum: 10c0/8851b3adc515cd91bdb06ff3a23a0f81f0069cfef79dfb3fa744da4b7a82e3555ccb6324c4fa71ecf22508db13b9ff6a0ed96675f95fc87903b9fc6afb699580 languageName: node linkType: hard @@ -1182,7 +1182,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 9ea3698b1d422561d93c0187ac1ed8f2367e4250b10e259785ead5aa643c265830fd0f4cf5087a5bedbc4007444c06da2f2006686613220acf0949895f453666 + checksum: 10c0/76287adeab656fb7f39243e5ab6a8c60069cf69fffeebd1566457d56cb2f966366a23bd755d3e369f4d0437459e3b76243df370caa7d7d2287a8560b66c53ca2 languageName: node linkType: hard @@ -1194,7 +1194,7 @@ __metadata: regenerator-transform: "npm:^0.15.2" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 7fdacc7b40008883871b519c9e5cdea493f75495118ccc56ac104b874983569a24edd024f0f5894ba1875c54ee2b442f295d6241c3280e61c725d0dd3317c8e6 + checksum: 
10c0/3b0e989ae5db78894ee300b24e07fbcec490c39ab48629c519377581cf94e90308f4ddc10a8914edc9f403e2d3ac7a7ae0ae09003629d852da03e2ba846299c6 languageName: node linkType: hard @@ -1205,7 +1205,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 298c4440ddc136784ff920127cea137168e068404e635dc946ddb5d7b2a27b66f1dd4c4acb01f7184478ff7d5c3e7177a127279479926519042948fb7fa0fa48 + checksum: 10c0/4e6d61f6c9757592661cfbd2c39c4f61551557b98cb5f0995ef10f5540f67e18dde8a42b09716d58943b6e4b7ef5c9bcf19902839e7328a4d49149e0fecdbfcd languageName: node linkType: hard @@ -1216,7 +1216,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 5d677a03676f9fff969b0246c423d64d77502e90a832665dc872a5a5e05e5708161ce1effd56bb3c0f2c20a1112fca874be57c8a759d8b08152755519281f326 + checksum: 10c0/c423c66fec0b6503f50561741754c84366ef9e9818442c8881fbaa90cc363fd137084b9431cdc00ed2f1fd8c8a1a5982c4a7e1f2af3769db4caf2ac7ea55d4f0 languageName: node linkType: hard @@ -1228,7 +1228,7 @@ __metadata: "@babel/helper-skip-transparent-expression-wrappers": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c6372d2f788fd71d85aba12fbe08ee509e053ed27457e6674a4f9cae41ff885e2eb88aafea8fadd0ccf990601fc69ec596fa00959e05af68a15461a8d97a548d + checksum: 10c0/a348e4ae47e4ceeceb760506ec7bf835ccc18a2cf70ec74ebfbe41bc172fa2412b05b7d1b86836f8aee375e41a04ff20486074778d0e2d19d668b33dc52e9dbb languageName: node linkType: hard @@ -1239,7 +1239,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 53e55eb2575b7abfdb4af7e503a2bf7ef5faf8bf6b92d2cd2de0700bdd19e934e5517b23e6dfed94ba50ae516b62f3f916773ef7d9bc81f01503f585051e2949 + checksum: 10c0/cd15c407906b41e4b924ea151e455c11274dba050771ee7154ad88a1a274140ac5e84efc8d08c4379f2f0cec8a09e4a0a3b2a3a954ba6a67d9fb35df1c714c56 languageName: node linkType: hard @@ -1250,7 +1250,7 @@ __metadata: 
"@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: b16c5cb0b8796be0118e9c144d15bdc0d20a7f3f59009c6303a6e9a8b74c146eceb3f05186f5b97afcba7cfa87e34c1585a22186e3d5b22f2fd3d27d959d92b2 + checksum: 10c0/9b5f43788b9ffcb8f2b445a16b1aa40fcf23cb0446a4649445f098ec6b4cb751f243a535da623d59fefe48f4c40552f5621187a61811779076bab26863e3373d languageName: node linkType: hard @@ -1261,7 +1261,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 0af7184379d43afac7614fc89b1bdecce4e174d52f4efaeee8ec1a4f2c764356c6dba3525c0685231f1cbf435b6dd4ee9e738d7417f3b10ce8bbe869c32f4384 + checksum: 10c0/50e81d84c6059878be2a0e41e0d790cab10882cfb8fa85e8c2665ccb0b3cd7233f49197f17427bc7c1b36c80e07076640ecf1b641888d78b9cb91bc16478d84a languageName: node linkType: hard @@ -1275,7 +1275,7 @@ __metadata: "@babel/plugin-syntax-typescript": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: a816811129f3fcb0af1aeb52b84285be390ed8a0eedab17d31fa8e6847c4ca39b4b176d44831f20a8561b3f586974053570ad7bdfa51f89566276e6b191786d2 + checksum: 10c0/e08f7a981fe157e32031070b92cd77030018b002d063e4be3711ffb7ec04539478b240d8967a4748abb56eccc0ba376f094f30711ef6a028b2a89d15d6ddc01f languageName: node linkType: hard @@ -1286,7 +1286,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 561c429183a54b9e4751519a3dfba6014431e9cdc1484fad03bdaf96582dfc72c76a4f8661df2aeeae7c34efd0fa4d02d3b83a2f63763ecf71ecc925f9cc1f60 + checksum: 10c0/f1ed54742dc982666f471df5d087cfda9c6dbf7842bec2d0f7893ed359b142a38c0210358f297ab5c7a3e11ec0dfb0e523de2e2edf48b62f257aaadd5f068866 languageName: node linkType: hard @@ -1298,7 +1298,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2298461a194758086d17c23c26c7de37aa533af910f9ebf31ebd0893d4aa317468043d23f73edc782ec21151d3c46cf0ff8098a83b725c49a59de28a1d4d6225 + 
checksum: 10c0/dca5702d43fac70351623a12e4dfa454fd028a67498888522b644fd1a02534fabd440106897e886ebcc6ce6a39c58094ca29953b6f51bc67372aa8845a5ae49f languageName: node linkType: hard @@ -1310,7 +1310,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c5f835d17483ba899787f92e313dfa5b0055e3deab332f1d254078a2bba27ede47574b6599fcf34d3763f0c048ae0779dc21d2d8db09295edb4057478dc80a9a + checksum: 10c0/df824dcca2f6e731f61d69103e87d5dd974d8a04e46e28684a4ba935ae633d876bded09b8db890fd72d0caf7b9638e2672b753671783613cc78d472951e2df8c languageName: node linkType: hard @@ -1322,7 +1322,7 @@ __metadata: "@babel/helper-plugin-utils": "npm:^7.22.5" peerDependencies: "@babel/core": ^7.0.0 - checksum: 79d0b4c951955ca68235c87b91ab2b393c96285f8aeaa34d6db416d2ddac90000c9bd6e8c4d82b60a2b484da69930507245035f28ba63c6cae341cf3ba68fdef + checksum: 10c0/30fe1d29af8395a867d40a63a250ca89072033d9bc7d4587eeebeaf4ad7f776aab83064321bfdb1d09d7e29a1d392852361f4f60a353f0f4d1a3b435dcbf256b languageName: node linkType: hard @@ -1412,7 +1412,7 @@ __metadata: semver: "npm:^6.3.1" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 0214ac9434a2496eac7f56c0c91164421232ff2083a66e1ccab633ca91e262828e54a5cbdb9036e8fe53d53530b6597aa98c99de8ff07b5193ffd95f21dc9d2c + checksum: 10c0/2837a42089180e51bfd6864b6d197e01fc0abec1920422e71c0513c2fc8fb5f3bfe694ed778cc4e45856c546964945bc53bf8105e4b26f3580ce3685fa50cc0f languageName: node linkType: hard @@ -1425,7 +1425,7 @@ __metadata: "@babel/plugin-transform-flow-strip-types": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 60b5dde79621ae89943af459c4dc5b6030795f595a20ca438c8100f8d82c9ebc986881719030521ff5925799518ac5aa7f3fe62af8c33ab96be3681a71f88d03 + checksum: 10c0/1cf109925791f2af679f03289848d27596b4f27cb0ad4ee74a8dd4c1cbecc119bdef3b45cbbe12489bc9bdf61163f94c1c0bf6013cc58c325f1cc99edc01bda9 languageName: node linkType: hard @@ -1438,7 +1438,7 @@ __metadata: esutils: "npm:^2.0.2" 
peerDependencies: "@babel/core": ^7.0.0-0 || ^8.0.0-0 <8.0.0 - checksum: 039aba98a697b920d6440c622aaa6104bb6076d65356b29dad4b3e6627ec0354da44f9621bafbeefd052cd4ac4d7f88c9a2ab094efcb50963cb352781d0c6428 + checksum: 10c0/9d02f70d7052446c5f3a4fb39e6b632695fb6801e46d31d7f7c5001f7c18d31d1ea8369212331ca7ad4e7877b73231f470b0d559162624128f1b80fe591409e6 languageName: node linkType: hard @@ -1454,7 +1454,7 @@ __metadata: "@babel/plugin-transform-react-pure-annotations": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: ef6aef131b2f36e2883e9da0d832903643cb3c9ad4f32e04fb3eecae59e4221d583139e8d8f973e25c28d15aafa6b3e60fe9f25c5fd09abd3e2df03b8637bdd2 + checksum: 10c0/cecb2493e09fd4ffa5effcef1d06e968386b1bfe077a99834f7e8ef249208274fca62fe5a6b3986ef1c1c3900b2eb409adb528ae1b73dba31397b16f9262e83c languageName: node linkType: hard @@ -1469,7 +1469,7 @@ __metadata: "@babel/plugin-transform-typescript": "npm:^7.23.3" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c4add0f3fcbb3f4a305c48db9ccb32694f1308ed9971ccbc1a8a3c76d5a13726addb3c667958092287d7aa080186c5c83dbfefa55eacf94657e6cde39e172848 + checksum: 10c0/e72b654c7f0f08b35d7e1c0e3a59c0c13037f295c425760b8b148aa7dde01e6ddd982efc525710f997a1494fafdd55cb525738c016609e7e4d703d02014152b7 languageName: node linkType: hard @@ -1484,14 +1484,14 @@ __metadata: source-map-support: "npm:^0.5.16" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: c72a6d4856ef04f13490370d805854d2d98a77786bfaec7d85e2c585e1217011c4f3df18197a890e14520906c9111bef95551ba1a9b59c88df4dfc2dfe2c8d1b + checksum: 10c0/b2466e41a4394e725b57e139ba45c3f61b88546d3cb443e84ce46cb34071b60c6cdb706a14c58a1443db530691a54f51da1f0c97f6c1aecbb838a2fb7eb5dbb9 languageName: node linkType: hard "@babel/regjsgen@npm:^0.8.0": version: 0.8.0 resolution: "@babel/regjsgen@npm:0.8.0" - checksum: c57fb730b17332b7572574b74364a77d70faa302a281a62819476fa3b09822974fd75af77aea603ad77378395be64e81f89f0e800bf86cbbf21652d49ce12ee8 + checksum: 
10c0/4f3ddd8c7c96d447e05c8304c1d5ba3a83fcabd8a716bc1091c2f31595cdd43a3a055fff7cb5d3042b8cb7d402d78820fcb4e05d896c605a7d8bcf30f2424c4a languageName: node linkType: hard @@ -1500,7 +1500,7 @@ __metadata: resolution: "@babel/runtime@npm:7.23.9" dependencies: regenerator-runtime: "npm:^0.14.0" - checksum: 9a520fe1bf72249f7dd60ff726434251858de15cccfca7aa831bd19d0d3fb17702e116ead82724659b8da3844977e5e13de2bae01eb8a798f2823a669f122be6 + checksum: 10c0/e71205fdd7082b2656512cc98e647d9ea7e222e4fe5c36e9e5adc026446fcc3ba7b3cdff8b0b694a0b78bb85db83e7b1e3d4c56ef90726682b74f13249cf952d languageName: node linkType: hard @@ -1511,7 +1511,7 @@ __metadata: "@babel/code-frame": "npm:^7.23.5" "@babel/parser": "npm:^7.23.9" "@babel/types": "npm:^7.23.9" - checksum: 1b011ba9354dc2e646561d54b6862e0df51760e6179faadd79be05825b0b6da04911e4e192df943f1766748da3037fd8493615b38707f7cadb0cf0c96601c170 + checksum: 10c0/0e8b60119433787742bc08ae762bbd8d6755611c4cabbcb7627b292ec901a55af65d93d1c88572326069efb64136ef151ec91ffb74b2df7689bbab237030833a languageName: node linkType: hard @@ -1529,7 +1529,7 @@ __metadata: "@babel/types": "npm:^7.23.9" debug: "npm:^4.3.1" globals: "npm:^11.1.0" - checksum: e2bb845f7f229feb7c338f7e150f5f1abc5395dcd3a6a47f63a25242ec3ec6b165f04a6df7d4849468547faee34eb3cf52487eb0bd867a7d3c42fec2a648266f + checksum: 10c0/d1615d1d02f04d47111a7ea4446a1a6275668ca39082f31d51f08380de9502e19862be434eaa34b022ce9a17dbb8f9e2b73a746c654d9575f3a680a7ffdf5630 languageName: node linkType: hard @@ -1540,42 +1540,42 @@ __metadata: "@babel/helper-string-parser": "npm:^7.23.4" "@babel/helper-validator-identifier": "npm:^7.22.20" to-fast-properties: "npm:^2.0.0" - checksum: bed9634e5fd0f9dc63c84cfa83316c4cb617192db9fedfea464fca743affe93736d7bf2ebf418ee8358751a9d388e303af87a0c050cb5d87d5870c1b0154f6cb + checksum: 10c0/edc7bb180ce7e4d2aea10c6972fb10474341ac39ba8fdc4a27ffb328368dfdfbf40fca18e441bbe7c483774500d5c05e222cec276c242e952853dcaf4eb884f7 languageName: node linkType: hard 
"@base2/pretty-print-object@npm:1.0.1": version: 1.0.1 resolution: "@base2/pretty-print-object@npm:1.0.1" - checksum: c1b78a521ac712baa076589f3bc81318d07c34a5747e9177b6af37043592252587d98f9b7b59ec174968c6bea31a99fe4d7884121173a449b75fe602b7eb2839 + checksum: 10c0/98f77ea185a30c854897feb2a68fe51be8451a1a0b531bac61a5dd67033926a0ba0c9be6e0f819b8cb72ca349b3e7648bf81c12fd21df0b45219c75a3a75784b languageName: node linkType: hard "@bcoe/v8-coverage@npm:^0.2.3": version: 0.2.3 resolution: "@bcoe/v8-coverage@npm:0.2.3" - checksum: 1a1f0e356a3bb30b5f1ced6f79c413e6ebacf130421f15fac5fcd8be5ddf98aedb4404d7f5624e3285b700e041f9ef938321f3ca4d359d5b716f96afa120d88d + checksum: 10c0/6b80ae4cb3db53f486da2dc63b6e190a74c8c3cca16bb2733f234a0b6a9382b09b146488ae08e2b22cf00f6c83e20f3e040a2f7894f05c045c946d6a090b1d52 languageName: node linkType: hard "@braintree/browser-detection@npm:^2.0.0": version: 2.0.0 resolution: "@braintree/browser-detection@npm:2.0.0" - checksum: b509669a044550cfabbe2b107b4c8066c977f5cd5942a36e4dffd090a50df609029337367207735c9609bc87794ec829f5caf418cd31aa6849160014c352d15a + checksum: 10c0/ce9ac99d46547e8fb3a48352897cfa21d0788f34f76e58ade3201b8f8bcc9a1dc9403ff3b213f82f6f9a629e27cc629f7d09114642ea664e2f4add5e82b675d7 languageName: node linkType: hard "@colors/colors@npm:1.5.0": version: 1.5.0 resolution: "@colors/colors@npm:1.5.0" - checksum: 9d226461c1e91e95f067be2bdc5e6f99cfe55a721f45afb44122e23e4b8602eeac4ff7325af6b5a369f36396ee1514d3809af3f57769066d80d83790d8e53339 + checksum: 10c0/eb42729851adca56d19a08e48d5a1e95efd2a32c55ae0323de8119052be0510d4b7a1611f2abcbf28c044a6c11e6b7d38f99fccdad7429300c37a8ea5fb95b44 languageName: node linkType: hard "@colors/colors@npm:1.6.0, @colors/colors@npm:^1.6.0": version: 1.6.0 resolution: "@colors/colors@npm:1.6.0" - checksum: 66d00284a3a9a21e5e853b256942e17edbb295f4bd7b9aa7ef06bbb603568d5173eb41b0f64c1e51748bc29d382a23a67d99956e57e7431c64e47e74324182d9 + checksum: 
10c0/9328a0778a5b0db243af54455b79a69e3fb21122d6c15ef9e9fcc94881d8d17352d8b2b2590f9bdd46fac5c2d6c1636dcfc14358a20c70e22daf89e1a759b629 languageName: node linkType: hard @@ -1586,14 +1586,14 @@ __metadata: colorspace: "npm:1.1.x" enabled: "npm:2.0.x" kuler: "npm:^2.0.0" - checksum: 14e449a7f42f063f959b472f6ce02d16457a756e852a1910aaa831b63fc21d86f6c32b2a1aa98a4835b856548c926643b51062d241fb6e9b2b7117996053e6b9 + checksum: 10c0/a5133df8492802465ed01f2f0a5784585241a1030c362d54a602ed1839816d6c93d71dde05cf2ddb4fd0796238c19774406bd62fa2564b637907b495f52425fe languageName: node linkType: hard "@discoveryjs/json-ext@npm:^0.5.3": version: 0.5.7 resolution: "@discoveryjs/json-ext@npm:0.5.7" - checksum: b95682a852448e8ef50d6f8e3b7ba288aab3fd98a2bafbe46881a3db0c6e7248a2debe9e1ee0d4137c521e4743ca5bbcb1c0765c9d7b3e0ef53231506fec42b4 + checksum: 10c0/e10f1b02b78e4812646ddf289b7d9f2cb567d336c363b266bd50cd223cf3de7c2c74018d91cd2613041568397ef3a4a2b500aba588c6e5bd78c38374ba68f38c languageName: node linkType: hard @@ -1602,7 +1602,7 @@ __metadata: resolution: "@emotion/use-insertion-effect-with-fallbacks@npm:1.0.1" peerDependencies: react: ">=16.8.0" - checksum: 7d7ead9ba3f615510f550aea67815281ec5a5487de55aafc250f820317afc1fd419bd9e9e27602a0206ec5c152f13dc6130bccad312c1036706c584c65d66ef7 + checksum: 10c0/a15b2167940e3a908160687b73fc4fcd81e59ab45136b6967f02c7c419d9a149acd22a416b325c389642d4f1c3d33cf4196cad6b618128b55b7c74f6807a240b languageName: node linkType: hard @@ -1767,14 +1767,14 @@ __metadata: eslint-visitor-keys: "npm:^3.3.0" peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 - checksum: 8d70bcdcd8cd279049183aca747d6c2ed7092a5cf0cf5916faac1ef37ffa74f0c245c2a3a3d3b9979d9dfdd4ca59257b4c5621db699d637b847a2c5e02f491c2 + checksum: 10c0/7e559c4ce59cd3a06b1b5a517b593912e680a7f981ae7affab0d01d709e99cd5647019be8fafa38c350305bc32f1f7d42c7073edde2ab536c745e365f37b607e languageName: node linkType: hard "@eslint-community/regexpp@npm:^4.5.1, @eslint-community/regexpp@npm:^4.6.1": version: 
4.10.0 resolution: "@eslint-community/regexpp@npm:4.10.0" - checksum: 8c36169c815fc5d726078e8c71a5b592957ee60d08c6470f9ce0187c8046af1a00afbda0a065cc40ff18d5d83f82aed9793c6818f7304a74a7488dc9f3ecbd42 + checksum: 10c0/c5f60ef1f1ea7649fa7af0e80a5a79f64b55a8a8fa5086de4727eb4c86c652aedee407a9c143b8995d2c0b2d75c1222bec9ba5d73dbfc1f314550554f0979ef4 languageName: node linkType: hard @@ -1791,21 +1791,21 @@ __metadata: js-yaml: "npm:^4.1.0" minimatch: "npm:^3.1.2" strip-json-comments: "npm:^3.1.1" - checksum: 7a3b14f4b40fc1a22624c3f84d9f467a3d9ea1ca6e9a372116cb92507e485260359465b58e25bcb6c9981b155416b98c9973ad9b796053fd7b3f776a6946bce8 + checksum: 10c0/32f67052b81768ae876c84569ffd562491ec5a5091b0c1e1ca1e0f3c24fb42f804952fdd0a137873bc64303ba368a71ba079a6f691cee25beee9722d94cc8573 languageName: node linkType: hard "@eslint/js@npm:8.57.0": version: 8.57.0 resolution: "@eslint/js@npm:8.57.0" - checksum: 3c501ce8a997cf6cbbaf4ed358af5492875e3550c19b9621413b82caa9ae5382c584b0efa79835639e6e0ddaa568caf3499318e5bdab68643ef4199dce5eb0a0 + checksum: 10c0/9a518bb8625ba3350613903a6d8c622352ab0c6557a59fe6ff6178bf882bf57123f9d92aa826ee8ac3ee74b9c6203fe630e9ee00efb03d753962dcf65ee4bd94 languageName: node linkType: hard "@fal-works/esbuild-plugin-global-externals@npm:^2.1.2": version: 2.1.2 resolution: "@fal-works/esbuild-plugin-global-externals@npm:2.1.2" - checksum: fd68714cccfbd33a8ec31d11ac7c6373100a5e1b8e31941a45c723c802feccb0a00dde946f55cc91d58bff77d405adc2064b22f0faf5ee165968965e5da758a1 + checksum: 10c0/2c84a8e6121b00ac8e4eb2469ab8f188142db2f1927391758e5d0142cb684b7eb0fad0c9d6caf358616eb2a77af2c067e08b9ec8e05749b415fc4dd0ef96d0fe languageName: node linkType: hard @@ -1814,7 +1814,7 @@ __metadata: resolution: "@floating-ui/core@npm:1.6.0" dependencies: "@floating-ui/utils": "npm:^0.2.1" - checksum: d6a47cacde193cd8ccb4c268b91ccc4ca254dffaec6242b07fd9bcde526044cc976d27933a7917f9a671de0a0e27f8d358f46400677dbd0c8199de293e9746e1 + checksum: 
10c0/667a68036f7dd5ed19442c7792a6002ca02d1799221c4396691bbe0b6008b48f6ccad581225e81fa266bb91232f6c66838a5f825f554217e1ec886178b93381b languageName: node linkType: hard @@ -1824,7 +1824,7 @@ __metadata: dependencies: "@floating-ui/core": "npm:^1.0.0" "@floating-ui/utils": "npm:^0.2.0" - checksum: 83e97076c7a5f55c3506f574bc53f03d38bed6eb8181920c8733076889371e287e9ae6f28c520a076967759b9b6ff425362832a5cdf16a999069530dbb9cce53 + checksum: 10c0/d6cac10877918ce5a8d1a24b21738d2eb130a0191043d7c0dd43bccac507844d3b4dc5d4107d3891d82f6007945ca8fb4207a1252506e91c37e211f0f73cf77e languageName: node linkType: hard @@ -1836,7 +1836,7 @@ __metadata: peerDependencies: react: ">=16.8.0" react-dom: ">=16.8.0" - checksum: e57b2a498aecf8de0ec28adf434257fca7893bd9bd7e78b63ac98c63b29b9fc086fc175630154352f3610f5c4a0d329823837f4f6c235cc0459fde6417065590 + checksum: 10c0/4d87451e2dcc54b4753a0d81181036e47821cfd0d4c23f7e9c31590c7c91fb15fb0a5a458969a5ddabd61601eca5875ebd4e40bff37cee31f373b8f1ccc64518 languageName: node linkType: hard @@ -1850,14 +1850,14 @@ __metadata: peerDependencies: react: ">=16.8.0" react-dom: ">=16.8.0" - checksum: 997f5a471ac6080c5162ad86fbb8e5bc0eca9335c40d8445597a90ba645e5d35ee796c29bdc66d868182afe5901804ecd52b82560332ae123b0c269400421e63 + checksum: 10c0/769a6bc33c4fa6c8c38b2e1c91622854e5e8fdf39cb92b0998a7ca099dc831a551a005cd0aec7e98edf9bfed4f697397335f03034b5f41a0f4fb17c97fce6d20 languageName: node linkType: hard "@floating-ui/utils@npm:^0.2.0, @floating-ui/utils@npm:^0.2.1": version: 0.2.1 resolution: "@floating-ui/utils@npm:0.2.1" - checksum: 33c9ab346e7b05c5a1e6a95bc902aafcfc2c9d513a147e2491468843bd5607531b06d0b9aa56aa491cbf22a6c2495c18ccfc4c0344baec54a689a7bb8e4898d6 + checksum: 10c0/ee77756712cf5b000c6bacf11992ffb364f3ea2d0d51cc45197a7e646a17aeb86ea4b192c0b42f3fbb29487aee918a565e84f710b8c3645827767f406a6b4cc9 languageName: node linkType: hard @@ -1868,21 +1868,21 @@ __metadata: "@humanwhocodes/object-schema": "npm:^2.0.2" debug: "npm:^4.3.1" minimatch: 
"npm:^3.0.5" - checksum: 3ffb24ecdfab64014a230e127118d50a1a04d11080cbb748bc21629393d100850496456bbcb4e8c438957fe0934430d731042f1264d6a167b62d32fc2863580a + checksum: 10c0/66f725b4ee5fdd8322c737cb5013e19fac72d4d69c8bf4b7feb192fcb83442b035b92186f8e9497c220e58b2d51a080f28a73f7899bc1ab288c3be172c467541 languageName: node linkType: hard "@humanwhocodes/module-importer@npm:^1.0.1": version: 1.0.1 resolution: "@humanwhocodes/module-importer@npm:1.0.1" - checksum: e993950e346331e5a32eefb27948ecdee2a2c4ab3f072b8f566cd213ef485dd50a3ca497050608db91006f5479e43f91a439aef68d2a313bd3ded06909c7c5b3 + checksum: 10c0/909b69c3b86d482c26b3359db16e46a32e0fb30bd306a3c176b8313b9e7313dba0f37f519de6aa8b0a1921349e505f259d19475e123182416a506d7f87e7f529 languageName: node linkType: hard "@humanwhocodes/object-schema@npm:^2.0.2": version: 2.0.2 resolution: "@humanwhocodes/object-schema@npm:2.0.2" - checksum: ef915e3e2f34652f3d383b28a9a99cfea476fa991482370889ab14aac8ecd2b38d47cc21932526c6d949da0daf4a4a6bf629d30f41b0caca25e146819cbfa70e + checksum: 10c0/6fd83dc320231d71c4541d0244051df61f301817e9f9da9fd4cb7e44ec8aacbde5958c1665b0c419401ab935114fdf532a6ad5d4e7294b1af2f347dd91a6983f languageName: node linkType: hard @@ -1896,7 +1896,7 @@ __metadata: strip-ansi-cjs: "npm:strip-ansi@^6.0.1" wrap-ansi: "npm:^8.1.0" wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" - checksum: e9ed5fd27c3aec1095e3a16e0c0cf148d1fee55a38665c35f7b3f86a9b5d00d042ddaabc98e8a1cb7463b9378c15f22a94eb35e99469c201453eb8375191f243 + checksum: 10c0/b1bf42535d49f11dc137f18d5e4e63a28c5569de438a221c369483731e9dac9fb797af554e8bf02b6192d1e5eba6e6402cf93900c3d0ac86391d00d04876789e languageName: node linkType: hard @@ -1909,14 +1909,14 @@ __metadata: get-package-type: "npm:^0.1.0" js-yaml: "npm:^3.13.1" resolve-from: "npm:^5.0.0" - checksum: b000a5acd8d4fe6e34e25c399c8bdbb5d3a202b4e10416e17bfc25e12bab90bb56d33db6089ae30569b52686f4b35ff28ef26e88e21e69821d2b85884bd055b8 + checksum: 
10c0/dd2a8b094887da5a1a2339543a4933d06db2e63cbbc2e288eb6431bd832065df0c099d091b6a67436e71b7d6bf85f01ce7c15f9253b4cbebcc3b9a496165ba42 languageName: node linkType: hard "@istanbuljs/schema@npm:^0.1.2, @istanbuljs/schema@npm:^0.1.3": version: 0.1.3 resolution: "@istanbuljs/schema@npm:0.1.3" - checksum: a9b1e49acdf5efc2f5b2359f2df7f90c5c725f2656f16099e8b2cd3a000619ecca9fc48cf693ba789cf0fd989f6e0df6a22bc05574be4223ecdbb7997d04384b + checksum: 10c0/61c5286771676c9ca3eb2bd8a7310a9c063fb6e0e9712225c8471c582d157392c88f5353581c8c9adbe0dff98892317d2fdfc56c3499aa42e0194405206a963a languageName: node linkType: hard @@ -1930,7 +1930,7 @@ __metadata: jest-message-util: "npm:^29.7.0" jest-util: "npm:^29.7.0" slash: "npm:^3.0.0" - checksum: 4a80c750e8a31f344233cb9951dee9b77bf6b89377cb131f8b3cde07ff218f504370133a5963f6a786af4d2ce7f85642db206ff7a15f99fe58df4c38ac04899e + checksum: 10c0/7be408781d0a6f657e969cbec13b540c329671819c2f57acfad0dae9dbfe2c9be859f38fe99b35dba9ff1536937dc6ddc69fdcd2794812fa3c647a1619797f6c languageName: node linkType: hard @@ -1971,7 +1971,7 @@ __metadata: peerDependenciesMeta: node-notifier: optional: true - checksum: ab6ac2e562d083faac7d8152ec1cc4eccc80f62e9579b69ed40aedf7211a6b2d57024a6cd53c4e35fd051c39a236e86257d1d99ebdb122291969a0a04563b51e + checksum: 10c0/934f7bf73190f029ac0f96662c85cd276ec460d407baf6b0dbaec2872e157db4d55a7ee0b1c43b18874602f662b37cb973dda469a4e6d88b4e4845b521adeeb2 languageName: node linkType: hard @@ -1983,7 +1983,7 @@ __metadata: "@jest/types": "npm:^29.6.3" "@types/node": "npm:*" jest-mock: "npm:^29.7.0" - checksum: 90b5844a9a9d8097f2cf107b1b5e57007c552f64315da8c1f51217eeb0a9664889d3f145cdf8acf23a84f4d8309a6675e27d5b059659a004db0ea9546d1c81a8 + checksum: 10c0/c7b1b40c618f8baf4d00609022d2afa086d9c6acc706f303a70bb4b67275868f620ad2e1a9efc5edd418906157337cce50589a627a6400bbdf117d351b91ef86 languageName: node linkType: hard @@ -1992,7 +1992,7 @@ __metadata: resolution: "@jest/expect-utils@npm:29.7.0" dependencies: jest-get-type: 
"npm:^29.6.3" - checksum: ef8d379778ef574a17bde2801a6f4469f8022a46a5f9e385191dc73bb1fc318996beaed4513fbd7055c2847227a1bed2469977821866534593a6e52a281499ee + checksum: 10c0/60b79d23a5358dc50d9510d726443316253ecda3a7fb8072e1526b3e0d3b14f066ee112db95699b7a43ad3f0b61b750c72e28a5a1cac361d7a2bb34747fa938a languageName: node linkType: hard @@ -2002,7 +2002,7 @@ __metadata: dependencies: expect: "npm:^29.7.0" jest-snapshot: "npm:^29.7.0" - checksum: fea6c3317a8da5c840429d90bfe49d928e89c9e89fceee2149b93a11b7e9c73d2f6e4d7cdf647163da938fc4e2169e4490be6bae64952902bc7a701033fd4880 + checksum: 10c0/b41f193fb697d3ced134349250aed6ccea075e48c4f803159db102b826a4e473397c68c31118259868fd69a5cba70e97e1c26d2c2ff716ca39dc73a2ccec037e languageName: node linkType: hard @@ -2016,7 +2016,7 @@ __metadata: jest-message-util: "npm:^29.7.0" jest-mock: "npm:^29.7.0" jest-util: "npm:^29.7.0" - checksum: 9b394e04ffc46f91725ecfdff34c4e043eb7a16e1d78964094c9db3fde0b1c8803e45943a980e8c740d0a3d45661906de1416ca5891a538b0660481a3a828c27 + checksum: 10c0/cf0a8bcda801b28dc2e2b2ba36302200ee8104a45ad7a21e6c234148932f826cb3bc57c8df3b7b815aeea0861d7b6ca6f0d4778f93b9219398ef28749e03595c languageName: node linkType: hard @@ -2028,7 +2028,7 @@ __metadata: "@jest/expect": "npm:^29.7.0" "@jest/types": "npm:^29.6.3" jest-mock: "npm:^29.7.0" - checksum: 97dbb9459135693ad3a422e65ca1c250f03d82b2a77f6207e7fa0edd2c9d2015fbe4346f3dc9ebff1678b9d8da74754d4d440b7837497f8927059c0642a22123 + checksum: 10c0/a385c99396878fe6e4460c43bd7bb0a5cc52befb462cc6e7f2a3810f9e7bcce7cdeb51908fd530391ee452dc856c98baa2c5f5fa8a5b30b071d31ef7f6955cea languageName: node linkType: hard @@ -2065,7 +2065,7 @@ __metadata: peerDependenciesMeta: node-notifier: optional: true - checksum: a17d1644b26dea14445cedd45567f4ba7834f980be2ef74447204e14238f121b50d8b858fde648083d2cd8f305f81ba434ba49e37a5f4237a6f2a61180cc73dc + checksum: 
10c0/a754402a799541c6e5aff2c8160562525e2a47e7d568f01ebfc4da66522de39cbb809bbb0a841c7052e4270d79214e70aec3c169e4eae42a03bc1a8a20cb9fa2 languageName: node linkType: hard @@ -2074,7 +2074,7 @@ __metadata: resolution: "@jest/schemas@npm:29.6.3" dependencies: "@sinclair/typebox": "npm:^0.27.8" - checksum: 910040425f0fc93cd13e68c750b7885590b8839066dfa0cd78e7def07bbb708ad869381f725945d66f2284de5663bbecf63e8fdd856e2ae6e261ba30b1687e93 + checksum: 10c0/b329e89cd5f20b9278ae1233df74016ebf7b385e0d14b9f4c1ad18d096c4c19d1e687aa113a9c976b16ec07f021ae53dea811fb8c1248a50ac34fbe009fdf6be languageName: node linkType: hard @@ -2085,7 +2085,7 @@ __metadata: "@jridgewell/trace-mapping": "npm:^0.3.18" callsites: "npm:^3.0.0" graceful-fs: "npm:^4.2.9" - checksum: bcc5a8697d471396c0003b0bfa09722c3cd879ad697eb9c431e6164e2ea7008238a01a07193dfe3cbb48b1d258eb7251f6efcea36f64e1ebc464ea3c03ae2deb + checksum: 10c0/a2f177081830a2e8ad3f2e29e20b63bd40bade294880b595acf2fc09ec74b6a9dd98f126a2baa2bf4941acd89b13a4ade5351b3885c224107083a0059b60a219 languageName: node linkType: hard @@ -2097,7 +2097,7 @@ __metadata: "@jest/types": "npm:^29.6.3" "@types/istanbul-lib-coverage": "npm:^2.0.0" collect-v8-coverage: "npm:^1.0.0" - checksum: c073ab7dfe3c562bff2b8fee6cc724ccc20aa96bcd8ab48ccb2aa309b4c0c1923a9e703cea386bd6ae9b71133e92810475bb9c7c22328fc63f797ad3324ed189 + checksum: 10c0/7de54090e54a674ca173470b55dc1afdee994f2d70d185c80236003efd3fa2b753fff51ffcdda8e2890244c411fd2267529d42c4a50a8303755041ee493e6a04 languageName: node linkType: hard @@ -2109,7 +2109,7 @@ __metadata: graceful-fs: "npm:^4.2.9" jest-haste-map: "npm:^29.7.0" slash: "npm:^3.0.0" - checksum: 4420c26a0baa7035c5419b0892ff8ffe9a41b1583ec54a10db3037cd46a7e29dd3d7202f8aa9d376e9e53be5f8b1bc0d16e1de6880a6d319b033b01dc4c8f639 + checksum: 10c0/593a8c4272797bb5628984486080cbf57aed09c7cfdc0a634e8c06c38c6bef329c46c0016e84555ee55d1cd1f381518cf1890990ff845524c1123720c8c1481b languageName: node linkType: hard @@ -2132,7 +2132,7 @@ __metadata: pirates: 
"npm:^4.0.4" slash: "npm:^3.0.0" write-file-atomic: "npm:^4.0.2" - checksum: 30f42293545ab037d5799c81d3e12515790bb58513d37f788ce32d53326d0d72ebf5b40f989e6896739aa50a5f77be44686e510966370d58511d5ad2637c68c1 + checksum: 10c0/7f4a7f73dcf45dfdf280c7aa283cbac7b6e5a904813c3a93ead7e55873761fc20d5c4f0191d2019004fac6f55f061c82eb3249c2901164ad80e362e7a7ede5a6 languageName: node linkType: hard @@ -2145,7 +2145,7 @@ __metadata: "@types/node": "npm:*" "@types/yargs": "npm:^16.0.0" chalk: "npm:^4.0.0" - checksum: d3ca1655673539c54665f3e9135dc70887feb6b667b956e712c38f42e513ae007d3593b8075aecea8f2db7119f911773010f17f93be070b1725fbc6225539b6e + checksum: 10c0/4598b302398db0eb77168b75a6c58148ea02cc9b9f21c5d1bbe985c1c9257110a5653cf7b901c3cab87fba231e3fed83633687f1c0903b4bc6939ab2a8452504 languageName: node linkType: hard @@ -2159,7 +2159,7 @@ __metadata: "@types/node": "npm:*" "@types/yargs": "npm:^17.0.8" chalk: "npm:^4.0.0" - checksum: f74bf512fd09bbe2433a2ad460b04668b7075235eea9a0c77d6a42222c10a79b9747dc2b2a623f140ed40d6865a2ed8f538f3cbb75169120ea863f29a7ed76cd + checksum: 10c0/ea4e493dd3fb47933b8ccab201ae573dcc451f951dc44ed2a86123cd8541b82aa9d2b1031caf9b1080d6673c517e2dcc25a44b2dc4f3fbc37bfc965d444888c0 languageName: node linkType: hard @@ -2177,7 +2177,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 9237499394b1f5f1320c9a489dbf5db2ba4b1d68081bf767a08895b70d0d0830adb9f0f1e2c5c94202e5bee63fe031ea2b91870a6bc806ed5e370be6b06df2e8 + checksum: 10c0/31098ad8fcc2440437534599c111d9f2951dd74821e8ba46c521b969bae4c918d830b7bb0484efbad29a51711bb62d3bc623d5a1ed5b1695b5b5594ea9dd4ca0 languageName: node linkType: hard @@ -2188,28 +2188,28 @@ __metadata: "@jridgewell/set-array": "npm:^1.0.1" "@jridgewell/sourcemap-codec": "npm:^1.4.10" "@jridgewell/trace-mapping": "npm:^0.3.9" - checksum: c111a3d52fffd63a719035f9a453e0a9b4ba403a559b2f170f81e385ba5ed9cd4549575e166b20d3534e2aad9ea8473b8b17cee11b1c6595323be90d4e4c50d1 + checksum: 
10c0/dd6c48341ad01a75bd93bae17fcc888120d063bdf927d4c496b663aa68e22b9e51e898ba38abe7457b28efd3fa5cde43723dba4dc5f94281119fa709cb5046be languageName: node linkType: hard "@jridgewell/resolve-uri@npm:^3.1.0": version: 3.1.2 resolution: "@jridgewell/resolve-uri@npm:3.1.2" - checksum: 97106439d750a409c22c8bff822d648f6a71f3aa9bc8e5129efdc36343cd3096ddc4eeb1c62d2fe48e9bdd4db37b05d4646a17114ecebd3bbcacfa2de51c3c1d + checksum: 10c0/d502e6fb516b35032331406d4e962c21fe77cdf1cbdb49c6142bcbd9e30507094b18972778a6e27cbad756209cfe34b1a27729e6fa08a2eb92b33943f680cf1e languageName: node linkType: hard "@jridgewell/set-array@npm:^1.0.1": version: 1.1.2 resolution: "@jridgewell/set-array@npm:1.1.2" - checksum: 69a84d5980385f396ff60a175f7177af0b8da4ddb81824cb7016a9ef914eee9806c72b6b65942003c63f7983d4f39a5c6c27185bbca88eb4690b62075602e28e + checksum: 10c0/bc7ab4c4c00470de4e7562ecac3c0c84f53e7ee8a711e546d67c47da7febe7c45cd67d4d84ee3c9b2c05ae8e872656cdded8a707a283d30bd54fbc65aef821ab languageName: node linkType: hard "@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.13, @jridgewell/sourcemap-codec@npm:^1.4.14, @jridgewell/sourcemap-codec@npm:^1.4.15": version: 1.4.15 resolution: "@jridgewell/sourcemap-codec@npm:1.4.15" - checksum: 89960ac087781b961ad918978975bcdf2051cd1741880469783c42de64239703eab9db5230d776d8e6a09d73bb5e4cb964e07d93ee6e2e7aea5a7d726e865c09 + checksum: 10c0/0c6b5ae663087558039052a626d2d7ed5208da36cfd707dcc5cea4a07cfc918248403dcb5989a8f7afaf245ce0573b7cc6fd94c4a30453bd10e44d9363940ba5 languageName: node linkType: hard @@ -2219,14 +2219,14 @@ __metadata: dependencies: "@jridgewell/resolve-uri": "npm:^3.1.0" "@jridgewell/sourcemap-codec": "npm:^1.4.14" - checksum: eb8d167f8aeb3ac55e7726eda1bb6240787987fd66d480edbe15fc98ad594ec10cb584289f649e2074b9e117862c82efdec07db13850f3dc4cb242258bb2b67d + checksum: 10c0/26190e09129b184a41c83ce896ce41c0636ddc1285a22627a48ec7981829346ced655d5774bdca30446250baf0e4fb519c47732760d128edda51a6222b40397a languageName: 
node linkType: hard "@juggle/resize-observer@npm:^3.3.1": version: 3.4.0 resolution: "@juggle/resize-observer@npm:3.4.0" - checksum: 73d1d00ee9132fb6f0aea0531940a6b93603e935590bd450fc6285a328d906102eeeb95dea77b2edac0e779031a9708aa8c82502bd298ee4dd26e7dff48f397a + checksum: 10c0/12930242357298c6f2ad5d4ec7cf631dfb344ca7c8c830ab7f64e6ac11eb1aae486901d8d880fd08fb1b257800c160a0da3aee1e7ed9adac0ccbb9b7c5d93347 languageName: node linkType: hard @@ -2238,7 +2238,7 @@ __metadata: "@types/react": "npm:>=16" peerDependencies: react: ">=16" - checksum: bce1cb1dde0a9a2b786cd9167b9e2bc0e3be52c195a4a79aaf1677470566d1fd2979d01baca2380c76aa4a1a27cd89f051484e595fdc4144a428d6af39bb667a + checksum: 10c0/6d647115703dbe258f7fe372499fa8c6fe17a053ff0f2a208111c9973a71ae738a0ed376770445d39194d217e00e1a015644b24f32c2f7cb4f57988de0649b15 languageName: node linkType: hard @@ -2249,7 +2249,7 @@ __metadata: gunzip-maybe: "npm:^1.4.2" pump: "npm:^3.0.0" tar-fs: "npm:^2.1.1" - checksum: 39697cef2b92f6e08e3590467cc6da88cd6757b2a27cb9208879c2316ed71d6be4608892ee0a86eb0343140da1a5df498f93a32c2aaf8f1fbd90f883f08b5f63 + checksum: 10c0/d66e76c6c990745d691c85d1dfa7f3dfd181405bb52c295baf4d1838b847d40c686e24602ea0ab1cdeb14d409db59f6bb9e2f96f56fe53da275da9cccf778e27 languageName: node linkType: hard @@ -2259,14 +2259,14 @@ __metadata: dependencies: "@nodelib/fs.stat": "npm:2.0.5" run-parallel: "npm:^1.1.9" - checksum: 6ab2a9b8a1d67b067922c36f259e3b3dfd6b97b219c540877a4944549a4d49ea5ceba5663905ab5289682f1f3c15ff441d02f0447f620a42e1cb5e1937174d4b + checksum: 10c0/732c3b6d1b1e967440e65f284bd06e5821fedf10a1bea9ed2bb75956ea1f30e08c44d3def9d6a230666574edbaf136f8cfd319c14fd1f87c66e6a44449afb2eb languageName: node linkType: hard "@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": version: 2.0.5 resolution: "@nodelib/fs.stat@npm:2.0.5" - checksum: 012480b5ca9d97bff9261571dbbec7bbc6033f69cc92908bc1ecfad0792361a5a1994bc48674b9ef76419d056a03efadfce5a6cf6dbc0a36559571a7a483f6f0 + checksum: 
10c0/88dafe5e3e29a388b07264680dc996c17f4bda48d163a9d4f5c1112979f0ce8ec72aa7116122c350b4e7976bc5566dc3ddb579be1ceaacc727872eb4ed93926d languageName: node linkType: hard @@ -2276,7 +2276,7 @@ __metadata: dependencies: "@nodelib/fs.scandir": "npm:2.1.5" fastq: "npm:^1.6.0" - checksum: 40033e33e96e97d77fba5a238e4bba4487b8284678906a9f616b5579ddaf868a18874c0054a75402c9fbaaa033a25ceae093af58c9c30278e35c23c9479e79b0 + checksum: 10c0/db9de047c3bb9b51f9335a7bb46f4fcfb6829fb628318c12115fbaf7d369bfce71c15b103d1fc3b464812d936220ee9bc1c8f762d032c9f6be9acc99249095b1 languageName: node linkType: hard @@ -2289,7 +2289,7 @@ __metadata: https-proxy-agent: "npm:^7.0.1" lru-cache: "npm:^10.0.1" socks-proxy-agent: "npm:^8.0.1" - checksum: d4a48128f61e47f2f5c89315a5350e265dc619987e635bd62b52b29c7ed93536e724e721418c0ce352ceece86c13043c67aba1b70c3f5cc72fce6bb746706162 + checksum: 10c0/38ee5cbe8f3cde13be916e717bfc54fd1a7605c07af056369ff894e244c221e0b56b08ca5213457477f9bc15bca9e729d51a4788829b5c3cf296b3c996147f76 languageName: node linkType: hard @@ -2298,21 +2298,21 @@ __metadata: resolution: "@npmcli/fs@npm:3.1.0" dependencies: semver: "npm:^7.3.5" - checksum: f3a7ab3a31de65e42aeb6ed03ed035ef123d2de7af4deb9d4a003d27acc8618b57d9fb9d259fe6c28ca538032a028f37337264388ba27d26d37fff7dde22476e + checksum: 10c0/162b4a0b8705cd6f5c2470b851d1dc6cd228c86d2170e1769d738c1fbb69a87160901411c3c035331e9e99db72f1f1099a8b734bf1637cc32b9a5be1660e4e1e languageName: node linkType: hard "@pkgjs/parseargs@npm:^0.11.0": version: 0.11.0 resolution: "@pkgjs/parseargs@npm:0.11.0" - checksum: 115e8ceeec6bc69dff2048b35c0ab4f8bbee12d8bb6c1f4af758604586d802b6e669dcb02dda61d078de42c2b4ddce41b3d9e726d7daa6b4b850f4adbf7333ff + checksum: 10c0/5bd7576bb1b38a47a7fc7b51ac9f38748e772beebc56200450c4a817d712232b8f1d3ef70532c80840243c657d491cf6a6be1e3a214cff907645819fdc34aadd languageName: node linkType: hard "@pkgr/core@npm:^0.1.0": version: 0.1.1 resolution: "@pkgr/core@npm:0.1.1" - checksum: 
6f25fd2e3008f259c77207ac9915b02f1628420403b2630c92a07ff963129238c9262afc9e84344c7a23b5cc1f3965e2cd17e3798219f5fd78a63d144d3cceba + checksum: 10c0/3f7536bc7f57320ab2cf96f8973664bef624710c403357429fbf680a5c3b4843c1dbd389bb43daa6b1f6f1f007bb082f5abcb76bb2b5dc9f421647743b71d3d8 languageName: node linkType: hard @@ -2321,7 +2321,7 @@ __metadata: resolution: "@radix-ui/number@npm:1.0.1" dependencies: "@babel/runtime": "npm:^7.13.10" - checksum: 621ea8b7d4195d1a65a9c0aee918e8335e7f198088eec91577512c89c2ba3a3bab4a767cfb872a2b9c3092a78ff41cad9a924845a939f6bb87fe9356241ea0ea + checksum: 10c0/42e4870cd14459da6da03e43c7507dc4c807ed787a87bda52912a0d1d6d5013326b697c18c9625fc6a2cf0af2b45d9c86747985b45358fd92ab646b983978e3c languageName: node linkType: hard @@ -2330,7 +2330,7 @@ __metadata: resolution: "@radix-ui/primitive@npm:1.0.1" dependencies: "@babel/runtime": "npm:^7.13.10" - checksum: 2b93e161d3fdabe9a64919def7fa3ceaecf2848341e9211520c401181c9eaebb8451c630b066fad2256e5c639c95edc41de0ba59c40eff37e799918d019822d1 + checksum: 10c0/912216455537db3ca77f3e7f70174fb2b454fbd4a37a0acb7cfadad9ab6131abdfb787472242574460a3c301edf45738340cc84f6717982710082840fde7d916 languageName: node linkType: hard @@ -2350,7 +2350,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: 8cca086f0dbb33360e3c0142adf72f99fc96352d7086d6c2356dbb2ea5944cfb720a87d526fc48087741c602cd8162ca02b0af5e6fdf5f56d20fddb44db8b4c3 + checksum: 10c0/c931f6d7e0bac50fd1654a0303a303aff74a68a13a33a851a43a7c88677b53a92ca6557920b9105144a3002f899ce888437d20ddd7803a5c716edac99587626d languageName: node linkType: hard @@ -2373,7 +2373,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: 2ac740ab746f411942dc95100f1eb60b9a3670960a805e266533fa1bc7dec31a6dabddd746ab788ebd5a9c22b468e38922f39d30447925515f8e44f0a3b2e56c + checksum: 10c0/cefa56383d7451ca79e4bd5a29aaeef6c205a04297213efd149aaead82fc8cde4fb8298e20e6b3613e5696e43f814fb4489805428f6604834fb31f73c6725fa8 languageName: node 
linkType: hard @@ -2388,7 +2388,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 2b9a613b6db5bff8865588b6bf4065f73021b3d16c0a90b2d4c23deceeb63612f1f15de188227ebdc5f88222cab031be617a9dd025874c0487b303be3e5cc2a8 + checksum: 10c0/be06f8dab35b5a1bffa7a5982fb26218ddade1acb751288333e3b89d7b4a7dfb5a6371be83876dac0ec2ebe0866d295e8618b778608e1965342986ea448040ec languageName: node linkType: hard @@ -2403,7 +2403,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: a02187a3bae3a0f1be5fab5ad19c1ef06ceff1028d957e4d9994f0186f594a9c3d93ee34bacb86d1fa8eb274493362944398e1c17054d12cb3b75384f9ae564b + checksum: 10c0/3de5761b32cc70cd61715527f29d8c699c01ab28c195ced972ccbc7025763a373a68f18c9f948c7a7b922e469fd2df7fee5f7536e3f7bad44ffc06d959359333 languageName: node linkType: hard @@ -2418,7 +2418,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 5336a8b0d4f1cde585d5c2b4448af7b3d948bb63a1aadb37c77771b0e5902dc6266e409cf35fd0edaca7f33e26424be19e64fb8f9d7f7be2d6f1714ea2764210 + checksum: 10c0/b1a45b4d1d5070ca3b5864b920f6c6210c962bdb519abb62b38b1baef9d06737dc3d8ecdb61860b7504a735235a539652f5977c7299ec021da84e6b0f64d988a languageName: node linkType: hard @@ -2442,7 +2442,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: bcc14f0704fdc19430a2b922106a278e64401decffd6e47f427aa5de2d63367ba3e848b012c464a6b39a6e057060e41ad16964385941735a329e319cea46711a + checksum: 10c0/a7b9695092cd4109a7b4a4a66b7f634c42d4f39aa0893621a8ee5e8bc90f8ae27e741df66db726c341a60d2115e3f813520fee1f5cc4fb05d77914b4ade3819f languageName: node linkType: hard @@ -2457,7 +2457,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 1f8ca8f83b884b3612788d0742f3f054e327856d90a39841a47897dbed95e114ee512362ae314177de226d05310047cabbf66b686ae86ad1b65b6b295be24ef7 + checksum: 
10c0/d5fd4e5aa9d9a87c8ad490b3b4992d6f1d9eddf18e56df2a2bcf8744c4332b275d73377fd193df3e6ba0ad9608dc497709beca5c64de2b834d5f5350b3c9a272 languageName: node linkType: hard @@ -2479,7 +2479,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: d62631cc06a2f37d483d106f3732ffc00831498fc2306df51c675d7cdb9727169512a1ca43ce06d1bfd578e8d8d67a80858c7531579bacaf6079d3aaf0ca8663 + checksum: 10c0/bfff46919666c122f5b812ee427494ae8408c0eebee30337bd2ce0eedf539f0feaa242f790304ef9df15425b837010ffc6061ce467bedd2c5fd9373bee2b95da languageName: node linkType: hard @@ -2495,7 +2495,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 446a453d799cc790dd2a1583ff8328da88271bff64530b5a17c102fa7fb35eece3cf8985359d416f65e330cd81aa7b8fe984ea125fc4f4eaf4b3801d698e49fe + checksum: 10c0/e2859ca58bea171c956098ace7ecf615cf9432f58a118b779a14720746b3adcf0351c36c75de131548672d3cd290ca238198acbd33b88dc4706f98312e9317ad languageName: node linkType: hard @@ -2524,7 +2524,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: be32677e846ef93e8cbf219550e55b99583cb927b572a9ee466b0c242156d42ddc70f43135e22acffe48bba4cd3fe28888cc3f929947e078d8732bee958df4c4 + checksum: 10c0/4bd069b79f7046af2c0967b8e43f727cd09834cbd6df1e3d5a943c4f83428ff8b646882737fdf7593c22e261a1d13768a5c020138d79503862ae2e1729081bba languageName: node linkType: hard @@ -2544,7 +2544,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: d352bcd6ad65eb43c9e0d72d0755c2aae85e03fb287770866262be3a2d5302b2885aee3cd99f2bbf62ecd14fcb1460703f1dcdc40351f77ad887b931c6f0012a + checksum: 10c0/baf295bbbf09ead37b64ee1dc025a6a540960f5e60552766d78f6065504c67d4bcf49fad5e2073617d9a3011daafad625aa3bd1da7a886c704833b22a49e888f languageName: node linkType: hard @@ -2564,7 +2564,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: 
bedb934ac07c710dc5550a7bfc7065d47e099d958cde1d37e4b1947ae5451f1b7e6f8ff5965e242578bf2c619065e6038c3a3aa779e5eafa7da3e3dbc685799f + checksum: 10c0/67a66ff8898a5e7739eda228ab6f5ce808858da1dce967014138d87e72b6bbfc93dc1467c706d98d1a2b93bf0b6e09233d1a24d31c78227b078444c1a69c42be languageName: node linkType: hard @@ -2592,7 +2592,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: a23ffb1e3e29a8209b94ce3857bf559dcf2175c4f316169dc47d018e8e94cd018dc914331a1d1762f32448e2594b7c8945efaa7059056f9940ce92cc35cc7026 + checksum: 10c0/61e3ddfd1647e64fba855434ff41e8e7ba707244fe8841f78c450fbdce525383b64259279475615d030dbf1625cbffd8eeebee72d91bf6978794f5dbcf887fc0 languageName: node linkType: hard @@ -2632,7 +2632,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: 4d7b6d9d988f78764783a4b2fd6523457ff735436829e122dae824bdea4f2835ad0150dfc060517d6c29d953ef61ee12d7ce10cf160593e56967e528bf6f8ee5 + checksum: 10c0/888fffa703a8f79b45c01d5f03ad9aae66250ddfff827bbba4f222c4d0720aa2f01a3e4b6bd80acabaf5e2fa7ad79de9e9dfd14831f7f4c24337d4d8dfb58ccc languageName: node linkType: hard @@ -2652,7 +2652,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: b5ea8f1996c86d3f9df73c72926f3d1a400a2eb46a482a345d486651c503895af2ccf9d7723f97a4e612f7c1317eb622078ddf014b13e2b26070d8cf0ad0da1d + checksum: 10c0/87bcde47343f2bc4439a0dc34381f557905d9b3c1e8c5a0d32ceea62a8ef84f3abf671c5cb29309fc87759ad41d39af619ba546cf54109d64c8746e3ca683de3 languageName: node linkType: hard @@ -2668,7 +2668,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 734866561e991438fbcf22af06e56b272ed6ee8f7b536489ee3bf2f736f8b53bf6bc14ebde94834aa0aceda854d018a0ce20bb171defffbaed1f566006cbb887 + checksum: 10c0/3af6ea4891e6fa8091e666802adffe7718b3cd390a10fa9229a5f40f8efded9f3918ea01b046103d93923d41cc32119505ebb6bde76cad07a87b6cf4f2119347 languageName: node linkType: hard @@ -2694,7 +2694,7 @@ __metadata: optional: true 
"@types/react-dom": optional: true - checksum: 96ea35f0e959399f239ff3b75dcad72d5880c66966114c80293ab1450801c87353c0cb2a7a4a5e9825f43c9bd3d881f312a9c14bdacfa70f4050d406bec98c2b + checksum: 10c0/4f4761965022759ac0950ac026029b64049e1f18ef07a01ddde788b7606efcb262c9ae3a418de0c0756bf7285182ed0d268502c6f17ba86d2ff27eee5507bbf7 languageName: node linkType: hard @@ -2716,7 +2716,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: ed5407f48254f20cda542017774f259d0b2c0007ea4bd7287d10d751016dbf269cb13d1142591432c269c3ab768cde2f1ba0344743027d36bbec10af909f19de + checksum: 10c0/9b487dad213ea7e70b0aa205e7c6f790a6f2bf394c39912e22dbe003403fd0d24a41c2efd31695fc31ab7bac286f28253dbb2fc5202cacd572ebf909f1fdc86c languageName: node linkType: hard @@ -2742,7 +2742,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: 57f75b6617d4e2bb8f7782d6065e70fd0db44038588b3e8e5f8cd1101dc2c94744bd52b9c011c7b722cb5f9ca96d21fc78ee7caac07722894453019fd5ade3b0 + checksum: 10c0/3ed7ebe22ef2e8369e08bb59776671a7b8c413628249c338b8db86b4b9ac40127b4201d5bd4a9c23ea1fd21464769b4fa427d3ebcda3a7fcdbd45b256b5a753a languageName: node linkType: hard @@ -2757,7 +2757,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: b9fd39911c3644bbda14a84e4fca080682bef84212b8d8931fcaa2d2814465de242c4cfd8d7afb3020646bead9c5e539d478cea0a7031bee8a8a3bb164f3bc4c + checksum: 10c0/331b432be1edc960ca148637ae6087220873ee828ceb13bd155926ef8f49e862812de5b379129f6aaefcd11be53715f3237e6caa9a33d9c0abfff43f3ba58938 languageName: node linkType: hard @@ -2773,7 +2773,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: dee2be1937d293c3a492cb6d279fc11495a8f19dc595cdbfe24b434e917302f9ac91db24e8cc5af9a065f3f209c3423115b5442e65a5be9fd1e9091338972be9 + checksum: 10c0/29b069dbf09e48bca321af6272574ad0fc7283174e7d092731a10663fe00c0e6b4bde5e1b5ea67725fe48dcbe8026e7ff0d69d42891c62cbb9ca408498171fbe languageName: node linkType: hard @@ -2789,7 
+2789,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: c6ed0d9ce780f67f924980eb305af1f6cce2a8acbaf043a58abe0aa3cc551d9aa76ccee14531df89bbee302ead7ecc7fce330886f82d4672c5eda52f357ef9b8 + checksum: 10c0/3c94c78902dcb40b60083ee2184614f45c95a189178f52d89323b467bd04bcf5fdb1bc4d43debecd7f0b572c3843c7e04edbcb56f40a4b4b43936fb2770fb8ad languageName: node linkType: hard @@ -2804,7 +2804,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: bed9c7e8de243a5ec3b93bb6a5860950b0dba359b6680c84d57c7a655e123dec9b5891c5dfe81ab970652e7779fe2ad102a23177c7896dde95f7340817d47ae5 + checksum: 10c0/13cd0c38395c5838bc9a18238020d3bcf67fb340039e6d1cbf438be1b91d64cf6900b78121f3dc9219faeb40dcc7b523ce0f17e4a41631655690e5a30a40886a languageName: node linkType: hard @@ -2819,7 +2819,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 66b4312e857c58b75f3bf62a2048ef090b79a159e9da06c19a468c93e62336969c33dbef60ff16969f00b20386cc25d138f6a353f1658b35baac0a6eff4761b9 + checksum: 10c0/f5fbc602108668484a4ed506b7842482222d1d03094362e26abb7fdd593eee8794fc47d85b3524fb9d00884801c89a6eefd0bed0971eba1ec189c637b6afd398 languageName: node linkType: hard @@ -2835,7 +2835,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 433f07e61e04eb222349825bb05f3591fca131313a1d03709565d6226d8660bd1d0423635553f95ee4fcc25c8f2050972d848808d753c388e2a9ae191ebf17f3 + checksum: 10c0/94c5ab31dfd3678c0cb77a30025e82b3a287577c1a8674b0d703a36d27434bc9c59790e0bebf57ed153f0b8e0d8c3b9675fc9787b9eac525a09abcda8fa9e7eb languageName: node linkType: hard @@ -2851,7 +2851,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 6cc150ad1e9fa85019c225c5a5d50a0af6cdc4653dad0c21b4b40cd2121f36ee076db326c43e6bc91a69766ccff5a84e917d27970176b592577deea3c85a3e26 + checksum: 10c0/b109a4b3781781c4dc641a1173f0a6fcb0b0f7b2d7cdba5848a46070c9fb4e518909a46c20a3c2efbc78737c64859c59ead837f2940e8c8394d1c503ef58773b 
languageName: node linkType: hard @@ -2871,7 +2871,7 @@ __metadata: optional: true "@types/react-dom": optional: true - checksum: 2e9d0c8253f97e7d6ffb2e52a5cfd40ba719f813b39c3e2e42c496d54408abd09ef66b5aec4af9b8ab0553215e32452a5d0934597a49c51dd90dc39181ed0d57 + checksum: 10c0/0cbc12c2156b3fa0e40090cafd8525ce84c16a6b5a038a8e8fc7cbb16ed6da9ab369593962c57a18c41a16ec8713e0195c68ea34072ef1ca254ed4d4c0770bb4 languageName: node linkType: hard @@ -2880,7 +2880,7 @@ __metadata: resolution: "@radix-ui/rect@npm:1.0.1" dependencies: "@babel/runtime": "npm:^7.13.10" - checksum: e25492cb8a683246161d781f0f3205f79507280a60f50eb763f06e8b6fa211b940b784aa581131ed76695bd5df5d1033a6246b43a6996cf8959a326fe4d3eb00 + checksum: 10c0/4c5159661340acc31b11e1f2ebd87a1521d39bfa287544dd2cd75b399539a4b625d38a1501c90ceae21fcca18ed164b0c3735817ff140ae334098192c110e571 languageName: node linkType: hard @@ -2900,14 +2900,14 @@ __metadata: optional: true react-redux: optional: true - checksum: 11c718270bb378e5b26e172eb84cc549d6f263748b6f330b07ee9c366c6474b013fd410e5b2f65a5742e73b7873a3ac14e06cae4bb01480ba03b423c4fd92583 + checksum: 10c0/fa0aa4b7c6973ac87ce0ac7e45faa02c73b66c4ee0bc950d178494539a42a1bb908d109297102458b7ea14d5e7dae356e7a7ce9a1b9849b0e8451e6dd70fca9c languageName: node linkType: hard "@remix-run/router@npm:1.6.2": version: 1.6.2 resolution: "@remix-run/router@npm:1.6.2" - checksum: c261c3b52f08d7fcacce9c66d68dba3b6f0c8263ea15f69f9f1c89734685cdfe4f383c879324acade68cb331d48e3deca9ec00734abe08d9694e529096907f40 + checksum: 10c0/73da6884e53873e4290abb3978373cafc3f351994273b0663eda5e12c81cb427fc6fe4df1924569d9a214f701d0106cf37122455951e0239d7e6fa35071df558 languageName: node linkType: hard @@ -3036,7 +3036,7 @@ __metadata: optional: true "@resvg/resvg-js-win32-x64-msvc": optional: true - checksum: f3fbba7d11e912a931955fdec911db7913c4dae9bf63e5fa5b97faad2b845068438e281bf5cb72371ecd8c95ff31a1f40d413e996f95972e5c8fd47ab5ea3ecd + checksum: 
10c0/f04192e98d7ead730f9474f07e81aa9d9b28dc18218028f2a15c808e1ab0de511f07d8a77e01d058534e78f0ef7e9f33201f15f052e28bbaabaf159c68698a5e languageName: node linkType: hard @@ -3046,7 +3046,7 @@ __metadata: dependencies: estree-walker: "npm:^2.0.1" picomatch: "npm:^2.2.2" - checksum: 503a6f0a449e11a2873ac66cfdfb9a3a0b77ffa84c5cad631f5e4bc1063c850710e8d5cd5dab52477c0d66cda2ec719865726dbe753318cd640bab3fff7ca476 + checksum: 10c0/3ee56b2c8f1ed8dfd0a92631da1af3a2dfdd0321948f089b3752b4de1b54dc5076701eadd0e5fc18bd191b77af594ac1db6279e83951238ba16bf8a414c64c48 languageName: node linkType: hard @@ -3062,14 +3062,14 @@ __metadata: peerDependenciesMeta: rollup: optional: true - checksum: abb15eaec5b36f159ec351b48578401bedcefdfa371d24a914cfdbb1e27d0ebfbf895299ec18ccc343d247e71f2502cba21202bc1362d7ef27d5ded699e5c2b2 + checksum: 10c0/c7bed15711f942d6fdd3470fef4105b73991f99a478605e13d41888963330a6f9e32be37e6ddb13f012bc7673ff5e54f06f59fd47109436c1c513986a8a7612d languageName: node linkType: hard "@sinclair/typebox@npm:^0.27.8": version: 0.27.8 resolution: "@sinclair/typebox@npm:0.27.8" - checksum: 297f95ff77c82c54de8c9907f186076e715ff2621c5222ba50b8d40a170661c0c5242c763cba2a4791f0f91cb1d8ffa53ea1d7294570cf8cd4694c0e383e484d + checksum: 10c0/ef6351ae073c45c2ac89494dbb3e1f87cc60a93ce4cde797b782812b6f97da0d620ae81973f104b43c9b7eaa789ad20ba4f6a1359f1cc62f63729a55a7d22d4e languageName: node linkType: hard @@ -3078,7 +3078,7 @@ __metadata: resolution: "@sinonjs/commons@npm:3.0.1" dependencies: type-detect: "npm:4.0.8" - checksum: a0af217ba7044426c78df52c23cedede6daf377586f3ac58857c565769358ab1f44ebf95ba04bbe38814fba6e316ca6f02870a009328294fc2c555d0f85a7117 + checksum: 10c0/1227a7b5bd6c6f9584274db996d7f8cee2c8c350534b9d0141fc662eaf1f292ea0ae3ed19e5e5271c8fd390d27e492ca2803acd31a1978be2cdc6be0da711403 languageName: node linkType: hard @@ -3087,7 +3087,7 @@ __metadata: resolution: "@sinonjs/fake-timers@npm:10.3.0" dependencies: "@sinonjs/commons": "npm:^3.0.0" - checksum: 
78155c7bd866a85df85e22028e046b8d46cf3e840f72260954f5e3ed5bd97d66c595524305a6841ffb3f681a08f6e5cef572a2cce5442a8a232dc29fb409b83e + checksum: 10c0/2e2fb6cc57f227912814085b7b01fede050cd4746ea8d49a1e44d5a0e56a804663b0340ae2f11af7559ea9bf4d087a11f2f646197a660ea3cb04e19efc04aa63 languageName: node linkType: hard @@ -3101,7 +3101,7 @@ __metadata: dequal: "npm:^2.0.2" polished: "npm:^4.2.2" uuid: "npm:^9.0.0" - checksum: 79ec0da9bc1c8a8990b33d937c28c02f84d4febfc1c660fb5ebfd6b246a02ba4f6ad7f31577306dad4a11dca969edc660a9e7e323c1747eb60156ce3fcefa6ec + checksum: 10c0/91d20a7c35fff6a0b2aa33f2c1171d457c68fb9d955da12629d6f75d931d5aa3756837e413ab7bb928c4cc4b48dcc5cdd63510e6028e7bd8fc8c82d93be967d0 languageName: node linkType: hard @@ -3112,7 +3112,7 @@ __metadata: "@storybook/global": "npm:^5.0.0" memoizerific: "npm:^1.11.3" ts-dedent: "npm:^2.0.0" - checksum: ee237ae5e1b0d696b2726d80137b4f8bc75740f34e9b94bbab3a1d04ea6304c67de0feb72650c7556ee05aa4db4143cfde7794bbe15ec2e36cd36d3aeaa13707 + checksum: 10c0/43518d762efa8dd140d029541e8e2bb748173a8428e3de67287ca132525e33e443282a2b06f3b381250d9557ada9ea3a07039aa69cf3de6b04aec02027fb9943 languageName: node linkType: hard @@ -3123,7 +3123,7 @@ __metadata: "@storybook/blocks": "npm:7.6.17" lodash: "npm:^4.17.21" ts-dedent: "npm:^2.0.0" - checksum: d9ae67dc3a208e07a07576529df3f34d41d8b3e4a1acc31573850ea39c8680c4676e6536108fef00c156b67ec3dd9cc5ae4d08dbc0e261b475e401511692d905 + checksum: 10c0/da66466b801064a916e059ce127efb2ab074a5c80fb65b568ac361d09fe55e0e993cd5400d6b0361bdfd783725e59449bbd30f87643964fa0db8e02a5f9550fd languageName: node linkType: hard @@ -3153,7 +3153,7 @@ __metadata: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: c4e1442b837350773a67990448c5bfdfd8060757bc5842cc1f617b01be8408dc566f90045423c321bf99b65976f62a63d916793c7591920cf42f908545ec6b2b + checksum: 
10c0/b43666832f1657f4dfac976ac8b8071995d65860a29f1ac66b80adb69a0d02f0d1d70684d94ddb76f0957f003b94b4252599e19f1e6a4342686598bbb40280ae languageName: node linkType: hard @@ -3178,7 +3178,7 @@ __metadata: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: d63a5359c8cb3fb69d120bb75582db66d32e9d350048dea20ab5cc5ad2107db813db8212f63c7d31e58f918dca22cd45ca615fb0d11fbc3562fa4f63675a7966 + checksum: 10c0/3208790b219e88fadc634aa00134eb3f0da9d2c05cd84e733d07e201177c58bccb85879ee4c26441a35b1e7fd318111dd668fdd8b3e57b37da512a658d4f50e9 languageName: node linkType: hard @@ -3187,7 +3187,7 @@ __metadata: resolution: "@storybook/addon-highlight@npm:7.6.17" dependencies: "@storybook/global": "npm:^5.0.0" - checksum: b2d213b101013de5da40d6b66999b36e66a321834684c5c594de1a5c96e527d9ee9add844006e0ca36a4a638cea8325c9db0e3618616da04a1039b8d18b92ea3 + checksum: 10c0/5f16a648a38257bdd66f592b519cc6b4ecf36c50d0cb01696f1c42c6c9fa2b44b7056b64d611579f2ec4764787b6bd34ea6b9ebddb01b0e562b3eb8100b1cf96 languageName: node linkType: hard @@ -3200,7 +3200,7 @@ __metadata: jest-mock: "npm:^27.0.6" polished: "npm:^4.2.2" ts-dedent: "npm:^2.2.0" - checksum: 4e22b7113137a6bd02a5ab10a0ea156871a8175a668837106750697e0691d734f30a21e3b44a4346c763da19180552adf823a67f8fd8b8bbc276d5e15b4d0271 + checksum: 10c0/f0910e8db378f502270747508c42174bdb75671620d24868264638a2693c60b35f088e4c06cb2239a69f4aa176f8dc8cf9e215f872d5aeefec933643225b66b8 languageName: node linkType: hard @@ -3216,7 +3216,7 @@ __metadata: peerDependenciesMeta: react: optional: true - checksum: 9865c3ff69257350ec13c17afd797de931df702419926f34a931fe136938b3e2323a54fdd162db374c6f56f3d9844c723b46097f90bf005ca582dc0820211ac5 + checksum: 10c0/c95aa5629a948f07a260430fecb8bed283a1bcfa97d8925b5edf3d4eab46155c85dec1814a00db4206a6de8323803b3d8bf74665c97caf34bb229a403f5b03d7 languageName: node linkType: hard @@ -3226,7 +3226,7 @@ __metadata: dependencies: "@storybook/global": "npm:^5.0.0" tiny-invariant: 
"npm:^1.3.1" - checksum: 098e3ac71ca5467cfc96af6e4e9cc5b4ba6ff4dce910d2823ab659ed764d38df1a5ecec293ed9ef5ea04b27c11a1f6962e1d658dc48363fe7aa0ef0d569ac47d + checksum: 10c0/18c26fd08c6b369ae74cbea4447ae0791efb6968875223b12e84021cf1c7a48496d56c35c6b1de03603081b650c3e4b54530b8704b68467bc667cbf550623ef9 languageName: node linkType: hard @@ -3236,14 +3236,14 @@ __metadata: dependencies: "@storybook/global": "npm:^5.0.0" ts-dedent: "npm:^2.0.0" - checksum: b6dafc517ac1cedb4709803110c87580105d2049bc1020f2d7e254c9202993d8a411c2b7e8972a97085d1240ff6af8f9d3728d4b5fa33a5052cbcc9fdc4012e7 + checksum: 10c0/840a554504c457e3dee273266ba90a7f36b7488a72644d046f0233c305d7fe3a0773848d104a3dc7d6efafc3e1b41a3fc4d6cdd7a37b3a3fe75a03fcde206efb languageName: node linkType: hard "@storybook/addon-toolbars@npm:7.6.17": version: 7.6.17 resolution: "@storybook/addon-toolbars@npm:7.6.17" - checksum: c1e051a5d9d1627aff9293c8ba33622c22851d443469227e36f018e005b8143fe5346512a3fe5ce6571af9d69ae051d3254a81fa7ed7f24b115d514dcd901eac + checksum: 10c0/af4453848c29ab8edb0cf6ca42ff14750841eaf3b523920620e42c27c0f07574a83c0dfe75f6a0de1846178aafb6833d59cef7faa7268777c24ed490da647814 languageName: node linkType: hard @@ -3252,7 +3252,7 @@ __metadata: resolution: "@storybook/addon-viewport@npm:7.6.17" dependencies: memoizerific: "npm:^1.11.3" - checksum: 96e7648fff610d9c8233103e7285d15cd3a585049907ef11f0c714f6c6e721bda41a85963ec12e18ffe4a697801a340767f10d3842251c7db0edc5692dc8c14b + checksum: 10c0/d888954f45ab358189cf0172e1c9b8a1bd2b68aa99d5d6518abe7fc355bbfeb91cc1c21c64e461994f5987652d05944aaa270366e22475eaeccadc701419b0d7 languageName: node linkType: hard @@ -3286,7 +3286,7 @@ __metadata: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: 82667ec9f5b2a812658b30974a98bd2c06b80c850f0cc310f5ff41d00d63ee5087af84de2e9619ba2b641b39c4f9cfa54f831fa8e11983b23869c4ce657f57b7 + checksum: 
10c0/f38233c935679345b4893d3d75b38ca8e74f3749b1f42a2356b61754bf1886cde8565546cdf53217335c8318506c56954aee7cc23c627b06f2d8c3b842d5d12b languageName: node linkType: hard @@ -3310,7 +3310,7 @@ __metadata: fs-extra: "npm:^11.1.0" process: "npm:^0.11.10" util: "npm:^0.12.4" - checksum: ad544213969e13bf67931026d5b6c2060617fa12b3939f37b604ff1f697ee785ff12e17ebb057bf076e7c7da2cbfdee76300d71e916d1ea3aa42242077740cec + checksum: 10c0/1b2ca77f7f3bf3c72890e949cfadc45d633fee7315ebcabfc1d6e23cd259db93114cbd9b9197597057f90c5fd60b3e72b0782a284a4f80c6efdd15f118b2c594 languageName: node linkType: hard @@ -3346,7 +3346,7 @@ __metadata: optional: true vite-plugin-glimmerx: optional: true - checksum: 1fa346b3cdd20fd25b1f114f4c9de6c035f4b895088d5f8c4805d0fb50dd399f2bb19e4a0153943edd8151365c2a05ced615eb1a1399c567d92f67782c875b1c + checksum: 10c0/eaa70e474240efd44adfdc8e7f6f57c3c1daddc966c221da981a0191fad322d78b279e954e03f20369eaa8223d11267f0a101ed3e9c16a3f7096f76fafc7388e languageName: node linkType: hard @@ -3360,7 +3360,7 @@ __metadata: qs: "npm:^6.10.0" telejson: "npm:^7.2.0" tiny-invariant: "npm:^1.3.1" - checksum: 6a3ea0b94b76a5b4e3614d5ad04207ea71eb9f67fa5f8cad51f2d9199003d8390b471669798b1f306b70b2d3834a87f0588fbd0dd50fd7ea275e18916cc4462a + checksum: 10c0/7109b67a60c656d22deb1b9b44bf0e26b565044de6ccf63589b0e52188931e2eaa11b78f7a0e1b59396f654537f79ac4264c715417d467aca602a6e80495f49e languageName: node linkType: hard @@ -3411,7 +3411,7 @@ __metadata: bin: getstorybook: ./bin/index.js sb: ./bin/index.js - checksum: 8be534cd4fafa5b0a9ffea94d945d442a6976491d4db19494640630000472929de72c037d8cb583d0afa4fe28341e73519b1e199548282aefedb91c6ffc91438 + checksum: 10c0/8d8d426a1eca5d58a4cafa8418a1c8a41736e21a89c66307d18cea98c583976d672ae0773ab53e4e38f110dad2db788bd5d8daef3970ae14834db205818713ef languageName: node linkType: hard @@ -3420,7 +3420,7 @@ __metadata: resolution: "@storybook/client-logger@npm:7.6.17" dependencies: "@storybook/global": "npm:^5.0.0" - checksum: 
a6e4f76eee426fcf9aae4ae660d0b81d71f60b29e36d81136901d73b79d19799df4f86f740d023c076f954d8c8e732cad8b0c91b3dffe774509a155f613d4f2c + checksum: 10c0/77ebd176e65171b10b94f65ce7f10ed8c78e162b54462f5b87604f568e747f1604b4eb62ff7a601bf02d7e72b32e373fb980dd9c688a655706e74c025ebb82f3 languageName: node linkType: hard @@ -3442,7 +3442,7 @@ __metadata: lodash: "npm:^4.17.21" prettier: "npm:^2.8.0" recast: "npm:^0.23.1" - checksum: bf8125f375308782da65323a36d89ee5096ce4f81cdfa6faea8d420329efe7b31f2c74b265e6520201e22d54f9b158d247055c3cc037e7c12dd31293f64b98e6 + checksum: 10c0/b8428203dfa551ea34b34659e5231cdc03eeb0fba2c53f801794b732515b173131bbe3df14dff9a540c18d3dfdafa7f94d11dbf34bf4dbaf03a47dd7c80d09ae languageName: node linkType: hard @@ -3463,7 +3463,7 @@ __metadata: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: 2e288e0e66bdca16a0b74bd160b54ed55595a303143449176b776ea1345fe8e9a52717339e8fe1ed231919bde42f57bbb555d8338ef24e83cdcfe0445d2ee81b + checksum: 10c0/199421d7668a3afcce9375c567443704778b4288bed16a39f02e5c1aaa9892b4ffba829b47d5a3fa8328521f6e0c26e5e7e7beed898cc0f8f835a99ec8f125a6 languageName: node linkType: hard @@ -3473,7 +3473,7 @@ __metadata: dependencies: "@storybook/client-logger": "npm:7.6.17" "@storybook/preview-api": "npm:7.6.17" - checksum: adb1bc7d32810612b1c108ebefdba73ec156e57dcdf1078366eb2d3ae20919526e0d3cef26090ebd90244a7c67c0abc2d066ec03de32b48b6b674824e1a1b095 + checksum: 10c0/3342367bce219d46ac0c5b494688ae86aeb5c4006d98749dec2e30518850bc76a8b255611e9151f043d5141d11deb781b972c8610e98565cab4112dc86b7c1d5 languageName: node linkType: hard @@ -3504,7 +3504,7 @@ __metadata: pretty-hrtime: "npm:^1.0.3" resolve-from: "npm:^5.0.0" ts-dedent: "npm:^2.0.0" - checksum: 80ff478a8e11871a898cd6a5e26b3c939c987194b384fd22baee39275d21679522e905932a0903339e7a1f1ae355b3e778095cb7a8cf48482dccf9385455ad76 + checksum: 
10c0/5be46d8f2d97dcde4a45de688278baed78185b44895825fe2f9423b70410fa88214a9709f40e7656cebe218a2c57cfa9979228e9f2b522eb47cf5af825d1133d languageName: node linkType: hard @@ -3513,7 +3513,7 @@ __metadata: resolution: "@storybook/core-events@npm:7.6.17" dependencies: ts-dedent: "npm:^2.0.0" - checksum: 07b54f574972c0a36e7356ef9908318c8132d33543b7e3a1d4f7e3cae08f4790fe8ee8dfca0b178025601c7267f2e947b15767e745f178bb8876c43498bc592f + checksum: 10c0/ab6410da3a456a61138b4a760a28b74bb9dc6f4c81de0d5ff7760b1853c6a437f8a0d05301c291f45503575d60c3be4805db4178f649eccd32c5ffd98a790250 languageName: node linkType: hard @@ -3562,7 +3562,7 @@ __metadata: util-deprecate: "npm:^1.0.2" watchpack: "npm:^2.2.0" ws: "npm:^8.2.3" - checksum: 20118728a4acf593f25fbcbbf9e4d945b2f9a6bce0e9366ad27da1bc19a9c596fe15a1e74629aff75fc04e752d22a0887f236e04a90fdf4c99f60c2851812cb6 + checksum: 10c0/b56077bea18c22151adb72c96efb1717034314b08bba5cae12b1f8a0e4135773f5c1e334ad3523dfeb578078b2d41a6091e2b0a992a110ca1859fdd89b1a4702 languageName: node linkType: hard @@ -3572,7 +3572,7 @@ __metadata: dependencies: "@storybook/csf-tools": "npm:7.6.17" unplugin: "npm:^1.3.1" - checksum: d3689b7a4d22f4b06f889a20e3d54c9f72bf1a6e5aa732cba7d60068b468745c099dbf333f7750a34309d9fcbada15fb895961f92c5e4e1279e60055df4cfef5 + checksum: 10c0/720ecbd2e845f6d6d575b8fb5b05a085ddba1eb486318a9b7d6f2ea6646fe3e62d7c9589e18aab15ce0a715c653c9d24b2e0f38117e92845e636f0410a85f76d languageName: node linkType: hard @@ -3589,7 +3589,7 @@ __metadata: fs-extra: "npm:^11.1.0" recast: "npm:^0.23.1" ts-dedent: "npm:^2.0.0" - checksum: d21fe4e09d1688465099bc3eef3088b0bde697fcf7618695a5f53d4dd50a84d2160a42734cee19cd5f3ff95c42d123bec471422b985c3f03fac45b126d638b3c + checksum: 10c0/827458c97de27127a026d6f4592ad8760f27b69dc1082251710b8067b0616bf2c6b9c13b12cbf12a8162a6528d92ca81839cf78d0d10d09978d3ccdedaca7bce languageName: node linkType: hard @@ -3598,7 +3598,7 @@ __metadata: resolution: "@storybook/csf@npm:0.0.1" dependencies: lodash: "npm:^4.17.15" - 
checksum: f6bb019bccd8abc14e45a85258158b7bd8cc525887ac8dc9151ed8c4908be3b5f5523da8a7a9b96ff11b13b6c1744e1a0e070560d63d836b950f595f9a5719d4 + checksum: 10c0/7b0f75763415f9147692a460b44417ee56ea9639433716a1fd4d1df4c8b0221cbc71b8da0fbed4dcecb3ccd6c7ed64be39f5c255c713539a6088a1d6488aaa24 languageName: node linkType: hard @@ -3607,14 +3607,14 @@ __metadata: resolution: "@storybook/csf@npm:0.1.2" dependencies: type-fest: "npm:^2.19.0" - checksum: 11168df65e7b6bd0e5d31e7e805c8ba80397fc190cb33424e043b72bbd85d8f826dba082503992d7f606b72484337ab9d091eca947550613e241fbef57780d4c + checksum: 10c0/b51a55292e5d2af8b1d135a28ecaa94f8860ddfedcb393adfa2cca1ee23853156066f737d8be1cb5412f572781aa525dc0b2f6e4a6f6ce805489f0149efe837c languageName: node linkType: hard "@storybook/docs-mdx@npm:^0.1.0": version: 0.1.0 resolution: "@storybook/docs-mdx@npm:0.1.0" - checksum: f830eda81606a8af86d2bbf9ed6e36c70d9e88442990684139629742f2cc5d7ddddba91338fe2fc5e9b98e74af1940a9899fde471a8bfbfec744deaa990592e7 + checksum: 10c0/e4d510f0452a7a3cb09d9617920c18b974f836299dfba38d6b2e62fbfea418d71f340b6c280a87201b1336a7221c7cc16b47794c1f8e81d01dcfa1f599343085 languageName: node linkType: hard @@ -3629,14 +3629,14 @@ __metadata: assert: "npm:^2.1.0" doctrine: "npm:^3.0.0" lodash: "npm:^4.17.21" - checksum: c2900d523b0490cb9cb1ff81764540d40064a2960fbda52ba77a8f09bd998e46440af2d629eb13a22f8de824d7991d6b44ef90f718adeb11569ce498286d3ea4 + checksum: 10c0/38473d0ce609cee38df5a8f3ad34a23ce6050e06b492cab51052ba67a2c6ecece532e0dee9f5e3cc5dee3d7105233289d05465a7ae0f5cb94fd2bbda1c267d38 languageName: node linkType: hard "@storybook/global@npm:^5.0.0": version: 5.0.0 resolution: "@storybook/global@npm:5.0.0" - checksum: 0e7b495f4fe7f36447e793926f1c0460ec07fd66f0da68e3150da5878f6043c9eeb9b41614a45c5ec0d48d5d383c59ca8f88b6dc7882a2a784ac9b20375d8edb + checksum: 10c0/8f1b61dcdd3a89584540896e659af2ecc700bc740c16909a7be24ac19127ea213324de144a141f7caf8affaed017d064fea0618d453afbe027cf60f54b4a6d0b languageName: node linkType: 
hard @@ -3658,35 +3658,35 @@ __metadata: store2: "npm:^2.14.2" telejson: "npm:^7.2.0" ts-dedent: "npm:^2.0.0" - checksum: 0e21042d06effabdd89a9b1edb972584a894eddb0e5551fd5f0e58e22b4fc8ef3c760f01b63f37dd22e3287f4bb16a508b1792014ce676854e32351fbb903328 + checksum: 10c0/475d0e0d37a72087c6b4f4e0bfe6ad648c27b5ea34951580b2e339f883d697ac7c4d99926db544a7c58b0aba959ad2d70129d7a7cee4bafaccd3810329a51e03 languageName: node linkType: hard "@storybook/manager@npm:7.6.17": version: 7.6.17 resolution: "@storybook/manager@npm:7.6.17" - checksum: 41dc7609235410088e20839e233bfbb833242defeb70fab951d95c72cd8a29db2e2a90a49bfd011dd944d14c33370f76c8af638969549bbea8907f14d1d867ef + checksum: 10c0/e703466e95b0fca58963ac0abec188164e6bce904471171dd360c0d63ead0183a5b242db034af63157acd42d38348984e5fe4e6414af6190234c4d5d41608cee languageName: node linkType: hard "@storybook/mdx2-csf@npm:^1.0.0": version: 1.1.0 resolution: "@storybook/mdx2-csf@npm:1.1.0" - checksum: acc368a8c8915e9487aa8e0c59241a39533d83635ddcc37fa4095cc239268a75900ec2bbfff65b573ead6ebcadcb1de0e4d70c9112faf105e0821de0a4803ca2 + checksum: 10c0/ba4496a51efae35edb3e509e488cd16066ccf0768d2dc527bbc2650d0bc0f630540985205772d63d1711d1a5dae66136a919077c90fa2ac7a02a13de43446baa languageName: node linkType: hard "@storybook/node-logger@npm:7.6.17": version: 7.6.17 resolution: "@storybook/node-logger@npm:7.6.17" - checksum: 10f9141caabf8377492470f242ec75008a680a22632ec47f5bc2e37886938eddfb3b25c6c4f757df92badd5c23ea19f8712c0448f4e620dae2ca82cdf0236efb + checksum: 10c0/7b91f10812b8ea4e8716c3b133c5a78ac419e6bcd6a6ab80117cee25287aa973c1710a74a882238697499a1eca6521c4171f4f2d2e8651fb8ef6e28b7ee167fe languageName: node linkType: hard "@storybook/postinstall@npm:7.6.17": version: 7.6.17 resolution: "@storybook/postinstall@npm:7.6.17" - checksum: bdafa7bbcf8e6bbb94e5b4590bd3af2433a31cb2bb03716e91303f41fafd20a34bca906b97a3c675ac5c7de53d2b145cf1ac070d25bdd0cec23db815bbceb1b3 + checksum: 
10c0/62038e1feacfa5b9acc85afd1cdcbee3c9d780c8dbb6d2eb8cf7bfbb6a14d989fa61351958f512415761d5190075367f1f3641e104c0cec0a2c8dd056617dea6 languageName: node linkType: hard @@ -3708,14 +3708,14 @@ __metadata: synchronous-promise: "npm:^2.0.15" ts-dedent: "npm:^2.0.0" util-deprecate: "npm:^1.0.2" - checksum: 4a2b8350b3d048966313cf6a1edadf36e59af1455425a0eba05255e6ae9be3afe986045d6ee08f3b7198ad285bd8841761e50e8c48ecbdbbd993b438e64b2d58 + checksum: 10c0/b4357ee0c1f9b05feee051d0c0ed3343972277f12d9d033fcc59acfb18d336cecc4a5f0b23998011af4a92c8126e785b2931dbdbdf79787aac5756a01c32aee0 languageName: node linkType: hard "@storybook/preview@npm:7.6.17": version: 7.6.17 resolution: "@storybook/preview@npm:7.6.17" - checksum: 3359606fbe96df4fbb1da9e8644cbbf8315703fedb4576d6765a8dffc65ff6df19fa1d0898c1ecc9f3b9432e55fbf30ac75f8f2dd2df03cd8fe09f50e14d10ab + checksum: 10c0/b4a2394c4622ff7291ba1b161d537902c53ed52ae3511c65e10c934b04463f6e7e55487b88889800acab55ea1c0aa33ea2a207786f3e06eda4617787f859da6b languageName: node linkType: hard @@ -3725,7 +3725,7 @@ __metadata: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: c1424ee03c2404e2970078719a0fd1b8c0f23bfd0c3161758bbfa5d9a69969dd4e0b78d1566058ebaa6c148a70fbb462217cfd1f05499ae84218b550a1bc2477 + checksum: 10c0/20558c58f9f0a3a00c5a1bbf2aa3517e3d318e6528f503129c99fb9ee4b604a225e79725f67e01e6e99d5d8c7db0614575dcc89af7768381afe59c976cb7cfc0 languageName: node linkType: hard @@ -3744,7 +3744,7 @@ __metadata: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 vite: ^3.0.0 || ^4.0.0 || ^5.0.0 - checksum: a8db6321421c0e67006b5dc869c5cf732e7738c58d7f0a808baa07c5b1dfd54f4ffe4f13d0e3b7d33fa27d8d24dc6a960d41490f736e7e517b3ca2bfb8ec3f9a + checksum: 10c0/2b45d09f17421d102b7599c55495b9c1688012f9761492493abf55dcfe8c23d65a4465ed6d5f96bb8e41475bbca103f4e0a285f65df85e17f8e82dce673b77dc languageName: node linkType: hard @@ -3780,7 +3780,7 @@ __metadata: peerDependenciesMeta: 
typescript: optional: true - checksum: 7582967e72448b6b23e086c616784637b91f05550b85858e530b9c1492bf1568b5f86e8e357ccc6b99186e77512e11a6644b762b9ee804736bf17b5e473f7adf + checksum: 10c0/747bb48413865701716652b9587c8c5b07cc51cb1d54125a69a4ec355f24fdcfc3a9d925a0b6268786875e97addf435e10efe737450e50eea1d19408049674e6 languageName: node linkType: hard @@ -3791,7 +3791,7 @@ __metadata: "@storybook/client-logger": "npm:7.6.17" memoizerific: "npm:^1.11.3" qs: "npm:^6.10.0" - checksum: 370157c9bed6bfdbc3605d2edb17a78c15f03c6176568b22aaa5adaa5bf814b049bb030aa24a836848411c6dd645276d105953f5efdfaac08cacf4e8b4b81312 + checksum: 10c0/8e5f354bd835319ca3c7f3ea8248914e7c22dee5815b1bdcbdbf6a9dc018f608683e482013767004105bc726d42c71f001a6c8d10c2177a511e6c0e093b7cf2d languageName: node linkType: hard @@ -3807,7 +3807,7 @@ __metadata: fetch-retry: "npm:^5.0.2" fs-extra: "npm:^11.1.0" read-pkg-up: "npm:^7.0.1" - checksum: ffbab1025e972ba77521ed6107ace7ab4ccd1ba8d5cb93e5bcc3ac45731ea8a742dee3d2a42ecbc32456fd13bf6c0f9d93707a0615106d58759e7a6dcf53736c + checksum: 10c0/2d13afef0fd73982c1efec1598583ed592bd608bbc61f9c4d96c47be9202d80043041764e00ea3b10b0636417cfbfe7b3d13c6898187a09554c8a696f89ac226 languageName: node linkType: hard @@ -3818,7 +3818,7 @@ __metadata: "@testing-library/dom": "npm:^9.0.0" "@testing-library/user-event": "npm:^14.4.0" ts-dedent: "npm:^2.2.0" - checksum: 85a8c39b432009c5ebac40569deef48b54f3b65f91bd1902bc86d4f130433bff2866c8bece9acdbd3f5cc92da5a1401f7405d11457570c96d3a30ba21c976b7b + checksum: 10c0/3179c74148c92267ea449068ce9fb00bf960dbf06654354de7869428415d16dc730a0d58b5adca7619d21e5a058ae0bf713e34c09be8bca574388ec0106c5068 languageName: node linkType: hard @@ -3833,7 +3833,7 @@ __metadata: peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: 1fb988364b02ddcd84f18800c5f952c8d90a1b20b129821dee8a965136f14a1d15973903be7239513d9fc3b3419cbc0e79e305ecea26b15d86cc689a439a8d38 + checksum: 
10c0/f18c52b236554056a97d9df23c5ecf186ffe2ef22eae3812a961b5d9beff96c2a05134ce2a39ad246c2b4ae0d5904a4e7148f7eb3d38d9c7b676d6d0a6c30595 languageName: node linkType: hard @@ -3845,7 +3845,7 @@ __metadata: "@types/babel__core": "npm:^7.0.0" "@types/express": "npm:^4.7.0" file-system-cache: "npm:2.3.0" - checksum: 6105905f8df6c7dad957c95718fc009b0cd6e96106ed3dab8c148af919464488532920449ab2fd21a0a6aea049098a4c7ab26248b6d2859e2a9d5f23149d908b + checksum: 10c0/7de04987b44b2d78d9e6ff39b54ece657b1d5266cc180a6b1a192ab394f893f8352578d9c8d0d2327e21689843a1c314f08e05eec18992d78a8d9347b0bcc72a languageName: node linkType: hard @@ -3854,7 +3854,7 @@ __metadata: resolution: "@svgr/babel-plugin-add-jsx-attribute@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: ecdf432de38a6789e419758425e766651c14c78e6c537158796dfdbbb930f69fb36f11b5ad046c6fbb70d4c6ad567d6ffc45e3afa3fc5f3330234c34299e96a7 + checksum: 10c0/66714c2961f21409b0d33f0f65cf52f2496838b4ed056e98c872faa9f60754fae491ca4397717991eaa9884a0a44ae8920fd550101c9877759bd73f361a49800 languageName: node linkType: hard @@ -3863,7 +3863,7 @@ __metadata: resolution: "@svgr/babel-plugin-remove-jsx-attribute@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 808ba216eea6904b2c0b664957b1ad4d3e0d9e36635ad2fca7fb2667031730cbbe067421ac0d50209f7c83dc3b6c2eff8f377780268cd1606c85603bc47b18d7 + checksum: 10c0/8b2320919d918e83d8b5fc9d194a4354e3aac98801863defe4f732954bb48b665812a5e3813f2eaf8bdb0c8d78f0a2c9934675a2df5248b99d2eb7a33688d408 languageName: node linkType: hard @@ -3872,7 +3872,7 @@ __metadata: resolution: "@svgr/babel-plugin-remove-jsx-empty-expression@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: da0cae989cc99b5437c877412da6251eef57edfff8514b879c1245b6519edfda101ebc4ba2be3cce3aa9a6014050ea4413e004084d839afd8ac8ffc587a921bf + checksum: 10c0/c9d338206aade1bd280a4d45ec3f80f72b91e0a27502d38eeb68024e5fa21b0fcd20f72b6e591eb0e82cca9793012680888e66c2fd04bdcf17e79385f512e946 languageName: node 
linkType: hard @@ -3881,7 +3881,7 @@ __metadata: resolution: "@svgr/babel-plugin-replace-jsx-attribute-value@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: e624918b545e414a1d0fbace6fc6f8c1c27dac4bf6e5fd4cbc9d8fbc9353fdf4bf6c4fe8b84fb938dfb5c0076cd2ed90b91ac60c0a7011f6e8b0cb71eabe60b3 + checksum: 10c0/9a39807bd09fb00c121e2b6952e24b90b6d9cd2318105176b93ccc4e1ec5b87b9999b96bce6f9f5e7769033583565908b440951de89ac9c3cb82ea0e0a3db686 languageName: node linkType: hard @@ -3890,7 +3890,7 @@ __metadata: resolution: "@svgr/babel-plugin-svg-dynamic-title@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 3ffc97cc61573ae4fb2e013ec0572b2273f55e8e125bb6c7fc69ae9fb433a675dc879f85166979cf21e1d0f1a5e168dabf116dcc468f132e83928b66cd791e1a + checksum: 10c0/49dd7907a63bd7643e6081d0bc4daee23e3fc095b6eafc58760f5d67314eee1ea60a6788ccbe68e2457f083ea31522c847119fe48eb6e2dc20956b9bb3316cbb languageName: node linkType: hard @@ -3899,7 +3899,7 @@ __metadata: resolution: "@svgr/babel-plugin-svg-em-dimensions@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 0f98ee5269983038ec8098fd1906f600199a9c7a48caca9ced1644f988cdb06acc434ec239554d8987bc2098a772c5b472f1cbb6a46dc8f39aa353aea818c963 + checksum: 10c0/9d5b569b75a612074b03aab20837dd1858f97d002b05fc9a2ec939aebbc8053e893960e264a1f2261bf0c426e4f8fa93c72313bcf7dface89fc09bc643147ebd languageName: node linkType: hard @@ -3908,7 +3908,7 @@ __metadata: resolution: "@svgr/babel-plugin-transform-react-native-svg@npm:7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 20067965349a9ed5ec339d63a2983a613135ae4dac416bd754683e41fdc91671f62d1950955f4ae57ec03525d13d7b0db467d4c2eb31ec22eafbe240fc840836 + checksum: 10c0/9091bd61d787e8506965f10a946dec463881b337aa435eedb0d5423ece1d0589fa643c2e01003cbb3447d3dbdf5d937ff7bae487a3098abbbe94ac04c84022d8 languageName: node linkType: hard @@ -3917,7 +3917,7 @@ __metadata: resolution: "@svgr/babel-plugin-transform-svg-component@npm:7.0.0" peerDependencies: "@babel/core": 
^7.0.0-0 - checksum: 0cd529df17943386d74d5d739779e377993cd531fb2607582a451dbe602eb28e051cf5260c90d7ce1578deed8be829552ea793f17e3a4e549764e67aeb983452 + checksum: 10c0/715c371bdae660fa9452083f2be6c1736d9ad516dc7134656c6e70374799de94eacda596504394aa6934aacb6da9099acd99569089220d66aaf91b34aa934c7b languageName: node linkType: hard @@ -3935,7 +3935,7 @@ __metadata: "@svgr/babel-plugin-transform-svg-component": "npm:^7.0.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 8c3ff1df1627b2db03e4755281b02e7f440323c9c9f71e3c8ebdab0e1966e24ca16686224da72a92e34b722e693bfa408aca5c62d42b02382e0c528bd3860be6 + checksum: 10c0/7d0755e2f007d4108b9ccbd7ccb2de2787ed3aa54cf873426bb211666996fe7a4fde73710a76bbdc169e1e72d7eca1dec5a6b26f14ab3124ff154ecbe387b69a languageName: node linkType: hard @@ -3947,7 +3947,7 @@ __metadata: "@svgr/babel-preset": "npm:^7.0.0" camelcase: "npm:^6.2.0" cosmiconfig: "npm:^8.1.3" - checksum: 47cd50b74dcf1be2e41ae894da425346d83c5f4caf5f268ac9f215b24936b72afaf6da1774b7963fde2634710cf5dac60c57f722a4ba7a8a4a5794b0bc515cb7 + checksum: 10c0/347617081188fc0ed5de53a8643b70949c8737a1b5baf6e4a2dd23ecb8311de111d4e76f8f005959ec66e7d53a5f8155249f6b947c8111042b978fc798f53c4c languageName: node linkType: hard @@ -3957,7 +3957,7 @@ __metadata: dependencies: "@babel/types": "npm:^7.21.3" entities: "npm:^4.4.0" - checksum: 71cf7fc641fef2f20ec5c90a77223a1c85aad7015617236d232b9738660bb4982cb60a62364010151ca1f6ec5927a89d2714a9c5a3248b83dafa056cf64496c4 + checksum: 10c0/2d6880fac9445559cc2e29f87782a52c37d2db7b99a4892f65def1e79a8239d7961c483934ff9ce2d37cb087f5b34c80ca5a51f7bc9eaceacfe0bd66e4e64373 languageName: node linkType: hard @@ -3969,7 +3969,7 @@ __metadata: "@svgr/babel-preset": "npm:^7.0.0" "@svgr/hast-util-to-babel-ast": "npm:^7.0.0" svg-parser: "npm:^2.0.4" - checksum: 009421b8e3f32bf13ebec4d47c7997106cd806c6922349871f2d9a77cd3304f55d30630dd8948ff77a9ead2ee1869ac39ad65cf95ab95b2192ef21d5704bd367 + checksum: 
10c0/bd649a306b83bc355315265046461cfa089c81604785b081fe0ccffd0112dc8bfad1e19d8e042d85339792458ab2e9022f8bf29fdd64bfea90718a40553ce00e languageName: node linkType: hard @@ -3985,7 +3985,7 @@ __metadata: dom-accessibility-api: "npm:^0.5.9" lz-string: "npm:^1.5.0" pretty-format: "npm:^27.0.2" - checksum: 510da752ea76f4a10a0a4e3a77917b0302cf03effe576cd3534cab7e796533ee2b0e9fb6fb11b911a1ebd7c70a0bb6f235bf4f816c9b82b95b8fe0cddfd10975 + checksum: 10c0/147da340e8199d7f98f3a4ad8aa22ed55b914b83957efa5eb22bfea021a979ebe5a5182afa9c1e5b7a5f99a7f6744a5a4d9325ae46ec3b33b5a15aed8750d794 languageName: node linkType: hard @@ -4018,7 +4018,7 @@ __metadata: optional: true vitest: optional: true - checksum: 7ee1e51caffad032734a4a43a00bf72d49080cf1bbf53021b443e91c7fa3762a66f55ce68f1c6643590fe66fbc4df92142659b8cf17c92166a3fb22691987e0d + checksum: 10c0/e7eba527b34ce30cde94424d2ec685bdfed51daaafb7df9b68b51aec6052e99a50c8bfe654612dacdf857a1eb81d68cf294fc89de558ee3a992bf7a6019fffcc languageName: node linkType: hard @@ -4032,7 +4032,7 @@ __metadata: peerDependencies: react: ^18.0.0 react-dom: ^18.0.0 - checksum: e02b2f32ae79665a79fc4d8ee053fd3832bfcd4753aa1dba05cdece1a9f59c72a0fae91e0a9387597dcb686d631a722729f2878e38dc95e6f23b291ad8d09b6c + checksum: 10c0/83b35cf8bf5640f1b63b32223ebc75799dc1a8e034d819120b26838fba0b0ab10bdbe6ad07dd8ae8287365f2b0c52dc9892a6fa11bb24d3e63ad97dfb7f2f296 languageName: node linkType: hard @@ -4041,28 +4041,28 @@ __metadata: resolution: "@testing-library/user-event@npm:14.5.2" peerDependencies: "@testing-library/dom": ">=7.21.4" - checksum: 49821459d81c6bc435d97128d6386ca24f1e4b3ba8e46cb5a96fe3643efa6e002d88c1b02b7f2ec58da593e805c59b78d7fdf0db565c1f02ba782f63ee984040 + checksum: 10c0/68a0c2aa28a3c8e6eb05cafee29705438d7d8a9427423ce5064d44f19c29e89b5636de46dd2f28620fb10abba75c67130185bbc3aa23ac1163a227a5f36641e1 languageName: node linkType: hard "@tootallnate/once@npm:2": version: 2.0.0 resolution: "@tootallnate/once@npm:2.0.0" - checksum: 
ad87447820dd3f24825d2d947ebc03072b20a42bfc96cbafec16bff8bbda6c1a81fcb0be56d5b21968560c5359a0af4038a68ba150c3e1694fe4c109a063bed8 + checksum: 10c0/073bfa548026b1ebaf1659eb8961e526be22fa77139b10d60e712f46d2f0f05f4e6c8bec62a087d41088ee9e29faa7f54838568e475ab2f776171003c3920858 languageName: node linkType: hard "@trysound/sax@npm:0.2.0": version: 0.2.0 resolution: "@trysound/sax@npm:0.2.0" - checksum: 7379713eca480ac0d9b6c7b063e06b00a7eac57092354556c81027066eb65b61ea141a69d0cc2e15d32e05b2834d4c9c2184793a5e36bbf5daf05ee5676af18c + checksum: 10c0/44907308549ce775a41c38a815f747009ac45929a45d642b836aa6b0a536e4978d30b8d7d680bbd116e9dd73b7dbe2ef0d1369dcfc2d09e83ba381e485ecbe12 languageName: node linkType: hard "@types/aria-query@npm:^5.0.1": version: 5.0.4 resolution: "@types/aria-query@npm:5.0.4" - checksum: c0084c389dc030daeaf0115a92ce43a3f4d42fc8fef2d0e22112d87a42798d4a15aac413019d4a63f868327d52ad6740ab99609462b442fe6b9286b172d2e82e + checksum: 10c0/dc667bc6a3acc7bba2bccf8c23d56cb1f2f4defaa704cfef595437107efaa972d3b3db9ec1d66bc2711bfc35086821edd32c302bffab36f2e79b97f312069f08 languageName: node linkType: hard @@ -4075,7 +4075,7 @@ __metadata: "@types/babel__generator": "npm:*" "@types/babel__template": "npm:*" "@types/babel__traverse": "npm:*" - checksum: c32838d280b5ab59d62557f9e331d3831f8e547ee10b4f85cb78753d97d521270cebfc73ce501e9fb27fe71884d1ba75e18658692c2f4117543f0fc4e3e118b3 + checksum: 10c0/bdee3bb69951e833a4b811b8ee9356b69a61ed5b7a23e1a081ec9249769117fa83aaaf023bb06562a038eb5845155ff663e2d5c75dd95c1d5ccc91db012868ff languageName: node linkType: hard @@ -4084,7 +4084,7 @@ __metadata: resolution: "@types/babel__generator@npm:7.6.8" dependencies: "@babel/types": "npm:^7.0.0" - checksum: b53c215e9074c69d212402990b0ca8fa57595d09e10d94bda3130aa22b55d796e50449199867879e4ea0ee968f3a2099e009cfb21a726a53324483abbf25cd30 + checksum: 10c0/f0ba105e7d2296bf367d6e055bb22996886c114261e2cb70bf9359556d0076c7a57239d019dee42bb063f565bade5ccb46009bce2044b2952d964bf9a454d6d2 
languageName: node linkType: hard @@ -4094,7 +4094,7 @@ __metadata: dependencies: "@babel/parser": "npm:^7.1.0" "@babel/types": "npm:^7.0.0" - checksum: d7a02d2a9b67e822694d8e6a7ddb8f2b71a1d6962dfd266554d2513eefbb205b33ca71a0d163b1caea3981ccf849211f9964d8bd0727124d18ace45aa6c9ae29 + checksum: 10c0/cc84f6c6ab1eab1427e90dd2b76ccee65ce940b778a9a67be2c8c39e1994e6f5bbc8efa309f6cea8dc6754994524cd4d2896558df76d92e7a1f46ecffee7112b languageName: node linkType: hard @@ -4103,7 +4103,7 @@ __metadata: resolution: "@types/babel__traverse@npm:7.20.5" dependencies: "@babel/types": "npm:^7.20.7" - checksum: f0352d537448e1e37f27e6bb8c962d7893720a92fde9d8601a68a93dbc14e15c088b4c0c8f71021d0966d09fba802ef3de11fdb6766c33993f8cf24f1277c6a9 + checksum: 10c0/033abcb2f4c084ad33e30c3efaad82161240f351e3c71b6154ed289946b33b363696c0fbd42502b68e4582a87413c418321f40eb1ea863e34fe525641345e05b languageName: node linkType: hard @@ -4113,7 +4113,7 @@ __metadata: dependencies: "@types/connect": "npm:*" "@types/node": "npm:*" - checksum: 1e251118c4b2f61029cc43b0dc028495f2d1957fe8ee49a707fb940f86a9bd2f9754230805598278fe99958b49e9b7e66eec8ef6a50ab5c1f6b93e1ba2aaba82 + checksum: 10c0/aebeb200f25e8818d8cf39cd0209026750d77c9b85381cdd8deeb50913e4d18a1ebe4b74ca9b0b4d21952511eeaba5e9fbbf739b52731a2061e206ec60d568df languageName: node linkType: hard @@ -4122,7 +4122,7 @@ __metadata: resolution: "@types/connect@npm:3.4.38" dependencies: "@types/node": "npm:*" - checksum: 7eb1bc5342a9604facd57598a6c62621e244822442976c443efb84ff745246b10d06e8b309b6e80130026a396f19bf6793b7cecd7380169f369dac3bfc46fb99 + checksum: 10c0/2e1cdba2c410f25649e77856505cd60223250fa12dff7a503e492208dbfdd25f62859918f28aba95315251fd1f5e1ffbfca1e25e73037189ab85dd3f8d0a148c languageName: node linkType: hard @@ -4131,49 +4131,49 @@ __metadata: resolution: "@types/cross-spawn@npm:6.0.6" dependencies: "@types/node": "npm:*" - checksum: 
b4172927cd1387cf037c3ade785ef46c87537b7bc2803d7f6663b4904d0c5d6f726415d1adb2fee4fecb21746738f11336076449265d46be4ce110cc3a8c8436 + checksum: 10c0/e3d476bb6b3a54a8934a97fe6ee4bd13e2e5eb29073929a4be76a52466602ffaea420b20774ffe8503f9fa24f3ae34817e95e7f625689fb0d1c10404f5b2889c languageName: node linkType: hard "@types/detect-port@npm:^1.3.0": version: 1.3.5 resolution: "@types/detect-port@npm:1.3.5" - checksum: 923cf04c6a05af59090743baeb9948f1938ceb98c1f7ea93db7ac310210426b385aa00005d23039ebb8019a9d13e141f5246e9c733b290885018d722a4787921 + checksum: 10c0/d8dd9d0e643106a2263f530b24ffdc3409d9391c50fc5e404018ba3633947aa3777db7fb094aeb0f49a13cc998aae8889747ad9edaa02b13a2de2385f37106ef languageName: node linkType: hard "@types/doctrine@npm:^0.0.3": version: 0.0.3 resolution: "@types/doctrine@npm:0.0.3" - checksum: 398c30efc903a750c71166c7385d763c98605723dfae23f0134d6de4d365a8f0a5585a0fe6f959569ff33646e7f43fa83bacb5f2a4d5929cd0f6163d06e4f6b3 + checksum: 10c0/566dcdc988c97ff01d14493ceb2223643347f07cf0a88c86cd7cb7c2821cfc837fd39295e6809a29614fdfdc6c4e981408155ca909b2e5da5d947af939b6c966 languageName: node linkType: hard "@types/doctrine@npm:^0.0.9": version: 0.0.9 resolution: "@types/doctrine@npm:0.0.9" - checksum: 64ef06e6eea2f4f9684d259fedbcb8bf21c954630b96ea2e04875ca42763552b0ba3b01b3dd27ec0f9ea6f8b3b0dba4965d31d5a925cd4c6225fd13a93ae9354 + checksum: 10c0/cdaca493f13c321cf0cacd1973efc0ae74569633145d9e6fc1128f32217a6968c33bea1f858275239fe90c98f3be57ec8f452b416a9ff48b8e8c1098b20fa51c languageName: node linkType: hard "@types/ejs@npm:^3.1.1": version: 3.1.5 resolution: "@types/ejs@npm:3.1.5" - checksum: 918898fd279108087722c1713e2ddb0c152ab839397946d164db8a18b5bbd732af9746373882a9bcf4843d35c6b191a8f569a7a4e51e90726d24501b39f40367 + checksum: 10c0/13d994cf0323d7e0ad33b9384914ccd3b4cd8bf282eced3649b1621b66ee7c784ac2d120a9d7b1f43d6f873518248fb8c3221b06a649b847860b9c2389a0b0ed languageName: node linkType: hard "@types/emscripten@npm:^1.39.6": version: 1.39.10 resolution: 
"@types/emscripten@npm:1.39.10" - checksum: 6ed97aa115761e83665897b3d5d259895db60c10d2378c1bf84f94746c3c178715004812f5f42bcfb6e439664144f812318e8175103c76806aa6eaaf126a94f0 + checksum: 10c0/c9adde9307d54efb5152931bfe99966fbe12fbd4d07663fb5cdc4cc1bd3a1f030882d50d4a27875b7b2d9713d160609e67b72e92177a021c9f4699ee5ac41035 languageName: node linkType: hard "@types/escodegen@npm:^0.0.6": version: 0.0.6 resolution: "@types/escodegen@npm:0.0.6" - checksum: 2e91554a47eb98076a3ba6e3548640e50b28a0f5b69134f99dd1e0ce5223c0a1726f23d25aafd40e4c7961d7c3c519782949aa606d58d0e7431c7fb1ec011c4c + checksum: 10c0/bbef189319c7b0386486bc7224369f118c7aedf35cc13e40ae5879b9ab4f848936f31e8eea50e71d4de72d4b7a77d9e6e9e5ceec4406c648fbc0077ede634ed5 languageName: node linkType: hard @@ -4183,21 +4183,21 @@ __metadata: dependencies: "@types/estree": "npm:*" "@types/json-schema": "npm:*" - checksum: bb8018f0c27839dd0b8c515ac4e6fac39500c36ba20007a6ecca2fe5e5f81cbecca2be8f6f649bdafd5556b8c6d5285d8506ae61cc8570f71fd4e6b07042f641 + checksum: 10c0/c2a7dd579d71e463cbc627248969878ac28b5a5b117b54f44939ee646eeec3e3d2b3a4e0061d3d7fab9d84cef302e3744f3d746e29fc86fb10a48b95b11fa5e9 languageName: node linkType: hard "@types/estree@npm:*, @types/estree@npm:^1.0.0": version: 1.0.5 resolution: "@types/estree@npm:1.0.5" - checksum: 7de6d928dd4010b0e20c6919e1a6c27b61f8d4567befa89252055fad503d587ecb9a1e3eab1b1901f923964d7019796db810b7fd6430acb26c32866d126fd408 + checksum: 10c0/b3b0e334288ddb407c7b3357ca67dbee75ee22db242ca7c56fe27db4e1a31989cb8af48a84dd401deb787fe10cc6b2ab1ee82dc4783be87ededbe3d53c79c70d languageName: node linkType: hard "@types/estree@npm:^0.0.51": version: 0.0.51 resolution: "@types/estree@npm:0.0.51" - checksum: b566c7a3fc8a81ca3d9e00a717e90b8f5d567e2476b4f6d76a20ec6da33ec28165b8f989ed8dd0c9df41405199777ec36a4f85f32a347fbc6c3f696a3128b6e7 + checksum: 10c0/a70c60d5e634e752fcd45b58c9c046ef22ad59ede4bc93ad5193c7e3b736ebd6bcd788ade59d9c3b7da6eeb0939235f011d4c59bb4fc04d8c346b76035099dd1 languageName: node 
linkType: hard @@ -4209,7 +4209,7 @@ __metadata: "@types/qs": "npm:*" "@types/range-parser": "npm:*" "@types/send": "npm:*" - checksum: 9079e137470e0456bb8e77ae66df9505ee12591e94860bde574cfe52c5c60bbc5bf7dd44f5689c3cbb1baf0aa84442d9a21f53dcd921d18745727293cd5a5fd6 + checksum: 10c0/12480527eef86ad9f748d785811c88e6bb89f4a76e531cf2e18f1f4f0743e46783cf4d27a939dec96aec8770c54c060d9e697bb8544ecd202098140688c3b222 languageName: node linkType: hard @@ -4221,14 +4221,14 @@ __metadata: "@types/express-serve-static-core": "npm:^4.17.33" "@types/qs": "npm:*" "@types/serve-static": "npm:*" - checksum: 7a6d26cf6f43d3151caf4fec66ea11c9d23166e4f3102edfe45a94170654a54ea08cf3103d26b3928d7ebcc24162c90488e33986b7e3a5f8941225edd5eb18c7 + checksum: 10c0/12e562c4571da50c7d239e117e688dc434db1bac8be55613294762f84fd77fbd0658ccd553c7d3ab02408f385bc93980992369dd30e2ecd2c68c358e6af8fabf languageName: node linkType: hard "@types/find-cache-dir@npm:^3.2.1": version: 3.2.1 resolution: "@types/find-cache-dir@npm:3.2.1" - checksum: bf5c4e96da40247cd9e6327f54dfccda961a0fb2d70e3c71bd05def94de4c2e6fb310fe8ecb0f04ecf5dbc52214e184b55a2337b0f87250d4ae1e2e7d58321e4 + checksum: 10c0/68059aec88ef776a689c1711a881fd91a9ce1b03dd5898ea1d2ac5d77d7b0235f21fdf210f380c13deca8b45e4499841a63aaf31fd2123af687f2c6b472f41ce languageName: node linkType: hard @@ -4238,7 +4238,7 @@ __metadata: dependencies: "@types/minimatch": "npm:*" "@types/node": "npm:*" - checksum: 6ae717fedfdfdad25f3d5a568323926c64f52ef35897bcac8aca8e19bc50c0bd84630bbd063e5d52078b2137d8e7d3c26eabebd1a2f03ff350fff8a91e79fc19 + checksum: 10c0/a8eb5d5cb5c48fc58c7ca3ff1e1ddf771ee07ca5043da6e4871e6757b4472e2e73b4cfef2644c38983174a4bc728c73f8da02845c28a1212f98cabd293ecae98 languageName: node linkType: hard @@ -4247,7 +4247,7 @@ __metadata: resolution: "@types/graceful-fs@npm:4.1.9" dependencies: "@types/node": "npm:*" - checksum: 79d746a8f053954bba36bd3d94a90c78de995d126289d656fb3271dd9f1229d33f678da04d10bce6be440494a5a73438e2e363e92802d16b8315b051036c5256 + 
checksum: 10c0/235d2fc69741448e853333b7c3d1180a966dd2b8972c8cbcd6b2a0c6cd7f8d582ab2b8e58219dbc62cce8f1b40aa317ff78ea2201cdd8249da5025adebed6f0b languageName: node linkType: hard @@ -4256,7 +4256,7 @@ __metadata: resolution: "@types/hast@npm:2.3.10" dependencies: "@types/unist": "npm:^2" - checksum: 41531b7fbf590b02452996fc63272479c20a07269e370bd6514982cbcd1819b4b84d3ea620f2410d1b9541a23d08ce2eeb0a592145d05e00e249c3d56700d460 + checksum: 10c0/16daac35d032e656defe1f103f9c09c341a6dc553c7ec17b388274076fa26e904a71ea5ea41fd368a6d5f1e9e53be275c80af7942b9c466d8511d261c9529c7e languageName: node linkType: hard @@ -4266,21 +4266,21 @@ __metadata: dependencies: "@types/react": "npm:*" hoist-non-react-statics: "npm:^3.3.0" - checksum: b645b062a20cce6ab1245ada8274051d8e2e0b2ee5c6bd58215281d0ec6dae2f26631af4e2e7c8abe238cdcee73fcaededc429eef569e70908f82d0cc0ea31d7 + checksum: 10c0/2a3b64bf3d9817d7830afa60ee314493c475fb09570a64e7737084cd482d2177ebdddf888ce837350bac51741278b077683facc9541f052d4bbe8487b4e3e618 languageName: node linkType: hard "@types/http-errors@npm:*": version: 2.0.4 resolution: "@types/http-errors@npm:2.0.4" - checksum: 1f3d7c3b32c7524811a45690881736b3ef741bf9849ae03d32ad1ab7062608454b150a4e7f1351f83d26a418b2d65af9bdc06198f1c079d75578282884c4e8e3 + checksum: 10c0/494670a57ad4062fee6c575047ad5782506dd35a6b9ed3894cea65830a94367bd84ba302eb3dde331871f6d70ca287bfedb1b2cf658e6132cd2cbd427ab56836 languageName: node linkType: hard "@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0, @types/istanbul-lib-coverage@npm:^2.0.1": version: 2.0.6 resolution: "@types/istanbul-lib-coverage@npm:2.0.6" - checksum: 3feac423fd3e5449485afac999dcfcb3d44a37c830af898b689fadc65d26526460bedb889db278e0d4d815a670331796494d073a10ee6e3a6526301fe7415778 + checksum: 10c0/3948088654f3eeb45363f1db158354fb013b362dba2a5c2c18c559484d5eb9f6fd85b23d66c0a7c2fcfab7308d0a585b14dadaca6cc8bf89ebfdc7f8f5102fb7 languageName: node linkType: hard @@ -4289,7 +4289,7 @@ __metadata: 
resolution: "@types/istanbul-lib-report@npm:3.0.3" dependencies: "@types/istanbul-lib-coverage": "npm:*" - checksum: b91e9b60f865ff08cb35667a427b70f6c2c63e88105eadd29a112582942af47ed99c60610180aa8dcc22382fa405033f141c119c69b95db78c4c709fbadfeeb4 + checksum: 10c0/247e477bbc1a77248f3c6de5dadaae85ff86ac2d76c5fc6ab1776f54512a745ff2a5f791d22b942e3990ddbd40f3ef5289317c4fca5741bedfaa4f01df89051c languageName: node linkType: hard @@ -4298,7 +4298,7 @@ __metadata: resolution: "@types/istanbul-reports@npm:3.0.4" dependencies: "@types/istanbul-lib-report": "npm:*" - checksum: 93eb18835770b3431f68ae9ac1ca91741ab85f7606f310a34b3586b5a34450ec038c3eed7ab19266635499594de52ff73723a54a72a75b9f7d6a956f01edee95 + checksum: 10c0/1647fd402aced5b6edac87274af14ebd6b3a85447ef9ad11853a70fd92a98d35f81a5d3ea9fcb5dbb5834e800c6e35b64475e33fcae6bfa9acc70d61497c54ee languageName: node linkType: hard @@ -4308,7 +4308,7 @@ __metadata: dependencies: expect: "npm:^29.0.0" pretty-format: "npm:^29.0.0" - checksum: 312e8dcf92cdd5a5847d6426f0940829bca6fe6b5a917248f3d7f7ef5d85c9ce78ef05e47d2bbabc40d41a930e0e36db2d443d2610a9e3db9062da2d5c904211 + checksum: 10c0/25fc8e4c611fa6c4421e631432e9f0a6865a8cb07c9815ec9ac90d630271cad773b2ee5fe08066f7b95bebd18bb967f8ce05d018ee9ab0430f9dfd1d84665b6f languageName: node linkType: hard @@ -4319,63 +4319,63 @@ __metadata: "@types/node": "npm:*" "@types/tough-cookie": "npm:*" parse5: "npm:^7.0.0" - checksum: 15fbb9a0bfb4a5845cf6e795f2fd12400aacfca53b8c7e5bca4a3e5e8fa8629f676327964d64258aefb127d2d8a2be86dad46359efbfca0e8c9c2b790e7f8a88 + checksum: 10c0/3d4b2a3eab145674ee6da482607c5e48977869109f0f62560bf91ae1a792c9e847ac7c6aaf243ed2e97333cb3c51aef314ffa54a19ef174b8f9592dfcb836b25 languageName: node linkType: hard "@types/json-schema@npm:*, @types/json-schema@npm:^7.0.12, @types/json-schema@npm:^7.0.15, @types/json-schema@npm:^7.0.9": version: 7.0.15 resolution: "@types/json-schema@npm:7.0.15" - checksum: 
1a3c3e06236e4c4aab89499c428d585527ce50c24fe8259e8b3926d3df4cfbbbcf306cfc73ddfb66cbafc973116efd15967020b0f738f63e09e64c7d260519e7 + checksum: 10c0/a996a745e6c5d60292f36731dd41341339d4eeed8180bb09226e5c8d23759067692b1d88e5d91d72ee83dfc00d3aca8e7bd43ea120516c17922cbcb7c3e252db languageName: node linkType: hard "@types/json5@npm:^0.0.29": version: 0.0.29 resolution: "@types/json5@npm:0.0.29" - checksum: 4e5aed58cabb2bbf6f725da13421aa50a49abb6bc17bfab6c31b8774b073fa7b50d557c61f961a09a85f6056151190f8ac95f13f5b48136ba5841f7d4484ec56 + checksum: 10c0/6bf5337bc447b706bb5b4431d37686aa2ea6d07cfd6f79cc31de80170d6ff9b1c7384a9c0ccbc45b3f512bae9e9f75c2e12109806a15331dc94e8a8db6dbb4ac languageName: node linkType: hard "@types/lodash@npm:^4.14.167": version: 4.14.202 resolution: "@types/lodash@npm:4.14.202" - checksum: 1bb9760a5b1dda120132c4b987330d67979c95dbc22612678682cd61b00302e190f4207228f3728580059cdab5582362262e3819aea59960c1017bd2b9fb26f6 + checksum: 10c0/6064d43c8f454170841bd67c8266cc9069d9e570a72ca63f06bceb484cb4a3ee60c9c1f305c1b9e3a87826049fd41124b8ef265c4dd08b00f6766609c7fe9973 languageName: node linkType: hard "@types/mdx@npm:^2.0.0": version: 2.0.11 resolution: "@types/mdx@npm:2.0.11" - checksum: 54d1ac0dc6c1c8d68f7537ecfab415767f34b4eee9d74f1d302b217307fb72bc976bf2616fdca654bdb01c1e4d152fb094b52f6502d8f6d6a063d3b9a8f9b81a + checksum: 10c0/8e60d9e1adb06854f25ac327ec340763b5867ce65ba5635ae6b24db6bda36d64655c5ee8a2f06bbc246199bcfd41cc3c8a4a95786c97a7befb3c28c7f134ffe1 languageName: node linkType: hard "@types/mime-types@npm:^2.1.0": version: 2.1.4 resolution: "@types/mime-types@npm:2.1.4" - checksum: f8c521c54ee0c0b9f90a65356a80b1413ed27ccdc94f5c7ebb3de5d63cedb559cd2610ea55b4100805c7349606a920d96e54f2d16b2f0afa6b7cd5253967ccc9 + checksum: 10c0/a10d57881d14a053556b3d09292de467968d965b0a06d06732c748da39b3aa569270b5b9f32529fd0e9ac1e5f3b91abb894f5b1996373254a65cb87903c86622 languageName: node linkType: hard "@types/mime@npm:*": version: 3.0.4 resolution: 
"@types/mime@npm:3.0.4" - checksum: a6139c8e1f705ef2b064d072f6edc01f3c099023ad7c4fce2afc6c2bf0231888202adadbdb48643e8e20da0ce409481a49922e737eca52871b3dc08017455843 + checksum: 10c0/db478bc0f99e40f7b3e01d356a9bdf7817060808a294978111340317bcd80ca35382855578c5b60fbc84ae449674bd9bb38427b18417e1f8f19e4f72f8b242cd languageName: node linkType: hard "@types/mime@npm:^1": version: 1.3.5 resolution: "@types/mime@npm:1.3.5" - checksum: e29a5f9c4776f5229d84e525b7cd7dd960b51c30a0fb9a028c0821790b82fca9f672dab56561e2acd9e8eed51d431bde52eafdfef30f643586c4162f1aecfc78 + checksum: 10c0/c2ee31cd9b993804df33a694d5aa3fa536511a49f2e06eeab0b484fef59b4483777dbb9e42a4198a0809ffbf698081fdbca1e5c2218b82b91603dfab10a10fbc languageName: node linkType: hard "@types/minimatch@npm:*": version: 5.1.2 resolution: "@types/minimatch@npm:5.1.2" - checksum: 94db5060d20df2b80d77b74dd384df3115f01889b5b6c40fa2dfa27cfc03a68fb0ff7c1f2a0366070263eb2e9d6bfd8c87111d4bc3ae93c3f291297c1bf56c85 + checksum: 10c0/83cf1c11748891b714e129de0585af4c55dd4c2cafb1f1d5233d79246e5e1e19d1b5ad9e8db449667b3ffa2b6c80125c429dbee1054e9efb45758dbc4e118562 languageName: node linkType: hard @@ -4385,7 +4385,7 @@ __metadata: dependencies: "@types/node": "npm:*" form-data: "npm:^4.0.0" - checksum: c416df8f182ec3826278ea42557fda08f169a48a05e60722d9c8edd4e5b2076ae281c6b6601ad406035b7201f885b0257983b61c26b3f9eb0f41192a807b5de5 + checksum: 10c0/5283d4e0bcc37a5b6d8e629aee880a4ffcfb33e089f4b903b2981b19c623972d1e64af7c3f9540ab990f0f5c89b9b5dda19c5bcb37a8e177079e93683bfd2f49 languageName: node linkType: hard @@ -4394,7 +4394,7 @@ __metadata: resolution: "@types/node@npm:20.11.21" dependencies: undici-types: "npm:~5.26.4" - checksum: a31ecc6a3c615bca310ffe7dea23613153ff9e1e175c09d14198402b2cef9b1bb1bf3912aff6ffc6cb01b99a025ec6dd6474c797bfb0aaf83daf4edaea063760 + checksum: 10c0/52b1cdfe8b14a67ab98c01b2e9621994b34a2537368e108fb925121a9d5958eb7344a2fb81ff36964932d5e5a093de8897f021bded10cad3536fd31e932b3000 languageName: node linkType: hard @@ 
-4403,49 +4403,49 @@ __metadata: resolution: "@types/node@npm:18.19.19" dependencies: undici-types: "npm:~5.26.4" - checksum: ea0f6be1f028054d4a3ecb672af7f52856dcc6e37cbe4733e8eb244084fdb50f2953bae730a2ad449c37d449e49d97deb4b17911705bc8adaf2651dc88e233e4 + checksum: 10c0/125bfcd654c7d205d4d6cb39b8d07541c032aa20f6aa12a564c3ec0988d8d30dda156c8dfc357cfad56963f4138bd338e491f5d13d4259931ac12d76259620fe languageName: node linkType: hard "@types/normalize-package-data@npm:^2.4.0": version: 2.4.4 resolution: "@types/normalize-package-data@npm:2.4.4" - checksum: 65dff72b543997b7be8b0265eca7ace0e34b75c3e5fee31de11179d08fa7124a7a5587265d53d0409532ecb7f7fba662c2012807963e1f9b059653ec2c83ee05 + checksum: 10c0/aef7bb9b015883d6f4119c423dd28c4bdc17b0e8a0ccf112c78b4fe0e91fbc4af7c6204b04bba0e199a57d2f3fbbd5b4a14bf8739bf9d2a39b2a0aad545e0f86 languageName: node linkType: hard "@types/pretty-hrtime@npm:^1.0.0": version: 1.0.3 resolution: "@types/pretty-hrtime@npm:1.0.3" - checksum: 288061dff992c8107d5c7b5a1277bbb0a314a27eb10087dea628a08fa37694a655191a69e25a212c95e61e498363c48ad9e281d23964a448f6c14100a6be0910 + checksum: 10c0/e4c22475c588be982b398dee9ac0b05b21078bc26581819290a4901c5b269bcaa04cae0e61e012d412e811b0897c9dab316db064208914df2f0ed0960fc5306b languageName: node linkType: hard "@types/prismjs@npm:^1.0.0": version: 1.26.3 resolution: "@types/prismjs@npm:1.26.3" - checksum: 4bd55230ffc0b2b16f4008be3a7f1d7c6b32dd3bed8006e64d24fb22c44fc7e300dac77b856f732803ccdc9a3472b2c0ee7776cad048843c47d608c41a89b6a6 + checksum: 10c0/3e8a64bcf0ab5f9a47ec2590938c5a8a20ac849b4949a95ed96e73e64cb890fc56e9c9b724286914717458267b28405f965709e1b9f80db5d68817a7ce5a18a9 languageName: node linkType: hard "@types/prop-types@npm:*": version: 15.7.11 resolution: "@types/prop-types@npm:15.7.11" - checksum: 7519ff11d06fbf6b275029fe03fff9ec377b4cb6e864cac34d87d7146c7f5a7560fd164bdc1d2dbe00b60c43713631251af1fd3d34d46c69cd354602bc0c7c54 + checksum: 
10c0/e53423cf9d510515ef8b47ff42f4f1b65a7b7b37c8704e2dbfcb9a60defe0c0e1f3cb1acfdeb466bad44ca938d7c79bffdd51b48ffb659df2432169d0b27a132 languageName: node linkType: hard "@types/qs@npm:*, @types/qs@npm:6.9.12, @types/qs@npm:^6.9.5": version: 6.9.12 resolution: "@types/qs@npm:6.9.12" - checksum: 76be8068091058987bb49aca59e9714ff856661cdc2340499f9d502c78950ac08e7ecbca256c8a72c4c83714bce30e6aaad13f9f739e8c0c436c0eedb2a2627c + checksum: 10c0/21a74f2b78d0839cee37f1a632f3361352f7dceac9edffd117227a695a13e58e18c138aac1f29403f2408221e678f538ca0b37d55012f8bba96d55905edbfe82 languageName: node linkType: hard "@types/range-parser@npm:*": version: 1.2.7 resolution: "@types/range-parser@npm:1.2.7" - checksum: 95640233b689dfbd85b8c6ee268812a732cf36d5affead89e806fe30da9a430767af8ef2cd661024fd97e19d61f3dec75af2df5e80ec3bea000019ab7028629a + checksum: 10c0/361bb3e964ec5133fa40644a0b942279ed5df1949f21321d77de79f48b728d39253e5ce0408c9c17e4e0fd95ca7899da36841686393b9f7a1e209916e9381a3c languageName: node linkType: hard @@ -4454,7 +4454,7 @@ __metadata: resolution: "@types/react-copy-to-clipboard@npm:5.0.7" dependencies: "@types/react": "npm:*" - checksum: adc2970c8756e648daa06e294c422df3dc076a784344ab2ecb78a17ebd7e8e3dfd7f31e68c24267de4815cdeec573a743d952a308b45b8380f6b7912a9a8b911 + checksum: 10c0/33bea4549fa263b597d0dedb3807f99286d8ccf59adb370e3d82d1c9075195925a343982abd73c63cc47854a7240ddae79873a5cb3590c9b33c1b65bf9d07689 languageName: node linkType: hard @@ -4463,7 +4463,7 @@ __metadata: resolution: "@types/react-dom@npm:18.2.19" dependencies: "@types/react": "npm:*" - checksum: 98eb760ce78f1016d97c70f605f0b1a53873a548d3c2192b40c897f694fd9c8bb12baeada16581a9c7b26f5022c1d2613547be98284d8f1b82d1611b1e3e7df0 + checksum: 10c0/88d7c6daa4659f661d0c97985d9fca492f24b421a34bb614dcd94c343aed7bea121463149e97fb01ecaa693be17b7d1542cf71ddb1705f3889a81eb2639a88aa languageName: node linkType: hard @@ -4472,7 +4472,7 @@ __metadata: resolution: "@types/react-syntax-highlighter@npm:15.5.11" 
dependencies: "@types/react": "npm:*" - checksum: 9074bc6964d26a9515182b0644536e8ed9482fc04986890c91b4aabf818a6008e121989309a3f5399f05b125180b35fbda1d0f3dd6ce2869d50b536c4c78ac05 + checksum: 10c0/b091ac86d72fcc27ff77007577cc64f604ed9de5b7cc3f986301fe7fa7a2a42d1347fb0bf19ff2223c152f9468a42b796f44e97fe526073ee168a3ee64266518 languageName: node linkType: hard @@ -4483,28 +4483,28 @@ __metadata: "@types/prop-types": "npm:*" "@types/scheduler": "npm:*" csstype: "npm:^3.0.2" - checksum: 5f2f6091623f13375a5bbc7e5c222cd212b5d6366ead737b76c853f6f52b314db24af5ae3f688d2d49814c668c216858a75433f145311839d8989d46bb3cbecf + checksum: 10c0/e1d8763259c75ebcdf241dbbbfb7e8f606a8abdc98b1acf1a23e741681ebb20e82b490402a32ce10b81ce7e51f00f8009a162a72136333613b4f247549a3ab8d languageName: node linkType: hard "@types/resolve@npm:^1.20.2": version: 1.20.6 resolution: "@types/resolve@npm:1.20.6" - checksum: dc35f5517606b6687cd971c0281ac58bdee2c50c051b030f04647d3991688be2259c304ee97e5b5d4b9936072c36767eb5933b54611a407d6557972bb6fea4f6 + checksum: 10c0/a9b0549d816ff2c353077365d865a33655a141d066d0f5a3ba6fd4b28bc2f4188a510079f7c1f715b3e7af505a27374adce2a5140a3ece2a059aab3d6e1a4244 languageName: node linkType: hard "@types/scheduler@npm:*": version: 0.16.8 resolution: "@types/scheduler@npm:0.16.8" - checksum: 6c091b096daa490093bf30dd7947cd28e5b2cd612ec93448432b33f724b162587fed9309a0acc104d97b69b1d49a0f3fc755a62282054d62975d53d7fd13472d + checksum: 10c0/f86de504945b8fc41b1f391f847444d542e2e4067cf7e5d9bfeb5d2d2393d3203b1161bc0ef3b1e104d828dabfb60baf06e8d2c27e27ff7e8258e6e618d8c4ec languageName: node linkType: hard "@types/semver@npm:^7.3.12, @types/semver@npm:^7.3.4, @types/semver@npm:^7.5.0": version: 7.5.8 resolution: "@types/semver@npm:7.5.8" - checksum: 3496808818ddb36deabfe4974fd343a78101fa242c4690044ccdc3b95dcf8785b494f5d628f2f47f38a702f8db9c53c67f47d7818f2be1b79f2efb09692e1178 + checksum: 
10c0/8663ff927234d1c5fcc04b33062cb2b9fcfbe0f5f351ed26c4d1e1581657deebd506b41ff7fdf89e787e3d33ce05854bc01686379b89e9c49b564c4cfa988efa languageName: node linkType: hard @@ -4514,7 +4514,7 @@ __metadata: dependencies: "@types/mime": "npm:^1" "@types/node": "npm:*" - checksum: 28320a2aa1eb704f7d96a65272a07c0bf3ae7ed5509c2c96ea5e33238980f71deeed51d3631927a77d5250e4091b3e66bce53b42d770873282c6a20bb8b0280d + checksum: 10c0/7f17fa696cb83be0a104b04b424fdedc7eaba1c9a34b06027239aba513b398a0e2b7279778af521f516a397ced417c96960e5f50fcfce40c4bc4509fb1a5883c languageName: node linkType: hard @@ -4525,56 +4525,56 @@ __metadata: "@types/http-errors": "npm:*" "@types/mime": "npm:*" "@types/node": "npm:*" - checksum: 49aa21c367fffe4588fc8c57ea48af0ea7cbadde7418bc53cde85d8bd57fd2a09a293970d9ea86e79f17a87f8adeb3e20da76aab38e1c4d1567931fa15c8af38 + checksum: 10c0/811d1a2f7e74a872195e7a013bcd87a2fb1edf07eaedcb9dcfd20c1eb4bc56ad4ea0d52141c13192c91ccda7c8aeb8a530d8a7e60b9c27f5990d7e62e0fecb03 languageName: node linkType: hard "@types/stack-utils@npm:^2.0.0": version: 2.0.3 resolution: "@types/stack-utils@npm:2.0.3" - checksum: 72576cc1522090fe497337c2b99d9838e320659ac57fa5560fcbdcbafcf5d0216c6b3a0a8a4ee4fdb3b1f5e3420aa4f6223ab57b82fef3578bec3206425c6cf5 + checksum: 10c0/1f4658385ae936330581bcb8aa3a066df03867d90281cdf89cc356d404bd6579be0f11902304e1f775d92df22c6dd761d4451c804b0a4fba973e06211e9bd77c languageName: node linkType: hard "@types/tough-cookie@npm:*": version: 4.0.5 resolution: "@types/tough-cookie@npm:4.0.5" - checksum: 01fd82efc8202670865928629697b62fe9bf0c0dcbc5b1c115831caeb073a2c0abb871ff393d7df1ae94ea41e256cb87d2a5a91fd03cdb1b0b4384e08d4ee482 + checksum: 10c0/68c6921721a3dcb40451543db2174a145ef915bc8bcbe7ad4e59194a0238e776e782b896c7a59f4b93ac6acefca9161fccb31d1ce3b3445cb6faa467297fb473 languageName: node linkType: hard "@types/triple-beam@npm:^1.3.2": version: 1.3.5 resolution: "@types/triple-beam@npm:1.3.5" - checksum: 
519b6a1b30d4571965c9706ad5400a200b94e4050feca3e7856e3ea7ac00ec9903e32e9a10e2762d0f7e472d5d03e5f4b29c16c0bd8c1f77c8876c683b2231f1 + checksum: 10c0/d5d7f25da612f6d79266f4f1bb9c1ef8f1684e9f60abab251e1261170631062b656ba26ff22631f2760caeafd372abc41e64867cde27fba54fafb73a35b9056a languageName: node linkType: hard "@types/unist@npm:^2, @types/unist@npm:^2.0.0": version: 2.0.10 resolution: "@types/unist@npm:2.0.10" - checksum: e2924e18dedf45f68a5c6ccd6015cd62f1643b1b43baac1854efa21ae9e70505db94290434a23da1137d9e31eb58e54ca175982005698ac37300a1c889f6c4aa + checksum: 10c0/5f247dc2229944355209ad5c8e83cfe29419fa7f0a6d557421b1985a1500444719cc9efcc42c652b55aab63c931813c88033e0202c1ac684bcd4829d66e44731 languageName: node linkType: hard "@types/use-sync-external-store@npm:^0.0.3": version: 0.0.3 resolution: "@types/use-sync-external-store@npm:0.0.3" - checksum: 161ddb8eec5dbe7279ac971531217e9af6b99f7783213566d2b502e2e2378ea19cf5e5ea4595039d730aa79d3d35c6567d48599f69773a02ffcff1776ec2a44e + checksum: 10c0/82824c1051ba40a00e3d47964cdf4546a224e95f172e15a9c62aa3f118acee1c7518b627a34f3aa87298a2039f982e8509f92bfcc18bea7c255c189c293ba547 languageName: node linkType: hard "@types/uuid@npm:^9.0.1": version: 9.0.8 resolution: "@types/uuid@npm:9.0.8" - checksum: b8c60b7ba8250356b5088302583d1704a4e1a13558d143c549c408bf8920535602ffc12394ede77f8a8083511b023704bc66d1345792714002bfa261b17c5275 + checksum: 10c0/b411b93054cb1d4361919579ef3508a1f12bf15b5fdd97337d3d351bece6c921b52b6daeef89b62340fd73fd60da407878432a1af777f40648cbe53a01723489 languageName: node linkType: hard "@types/yargs-parser@npm:*": version: 21.0.3 resolution: "@types/yargs-parser@npm:21.0.3" - checksum: a794eb750e8ebc6273a51b12a0002de41343ffe46befef460bdbb57262d187fdf608bc6615b7b11c462c63c3ceb70abe2564c8dd8ee0f7628f38a314f74a9b9b + checksum: 10c0/e71c3bd9d0b73ca82e10bee2064c384ab70f61034bbfb78e74f5206283fc16a6d85267b606b5c22cb2a3338373586786fed595b2009825d6a9115afba36560a0 languageName: node linkType: hard @@ -4583,7 +4583,7 @@ 
__metadata: resolution: "@types/yargs@npm:16.0.9" dependencies: "@types/yargs-parser": "npm:*" - checksum: 8f31cbfcd5c3ac67c27e26026d8b9af0c37770fb2421b661939ba06d136f5a4fa61528a5d0f495d5802fbf1d9244b499e664d8d884e3eb3c36d556fb7c278f18 + checksum: 10c0/be24bd9a56c97ddb2964c1c18f5b9fe8271a50e100dc6945989901aae58f7ce6fb8f3a591c749a518401b6301358dbd1997e83c36138a297094feae7f9ac8211 languageName: node linkType: hard @@ -4592,7 +4592,7 @@ __metadata: resolution: "@types/yargs@npm:17.0.32" dependencies: "@types/yargs-parser": "npm:*" - checksum: 1e2b2673847011ce43607df690d392f137d95a2d6ea85aa319403eadda2ef4277365efd4982354d8843f2611ef3846c88599660aaeb537fa9ccddae83c2a89de + checksum: 10c0/2095e8aad8a4e66b86147415364266b8d607a3b95b4239623423efd7e29df93ba81bb862784a6e08664f645cc1981b25fd598f532019174cd3e5e1e689e1cccf languageName: node linkType: hard @@ -4617,7 +4617,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: f0b6b6e6ae2afee1df8dd2fd0c56588f9bb600468be9f255e033709a53371c6434da687e75dcb673503ef4f0416226f4ca3c94c65272828106e39b56aac87334 + checksum: 10c0/e5644a987969cbb614bbf766b6bf51341e123c774953690548610147eae0041d70e48ef42be97b68a6e2f5ed9aae37fe040e8054d35bb0568c14194ba564b2d8 languageName: node linkType: hard @@ -4635,7 +4635,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 39238d37f5a5f7058371ee3882fb7cd8a4579883fc5f13fda645c151fcf8d15e4c0db3ea7ffa7915a55c82451b544e9340c0228b45b83085158cb97974112f19 + checksum: 10c0/8fcbfc8c0c86abb750173096e7ca09e1cd44aba3f6115bdb94ffb6b409b86ee23526e9d5a44935b69a6be2385893e66d8e55d92063206028dc48f70d379afcab languageName: node linkType: hard @@ -4645,7 +4645,7 @@ __metadata: dependencies: "@typescript-eslint/types": "npm:5.62.0" "@typescript-eslint/visitor-keys": "npm:5.62.0" - checksum: e827770baa202223bc0387e2fd24f630690809e460435b7dc9af336c77322290a770d62bd5284260fa881c86074d6a9fd6c97b07382520b115f6786b8ed499da + checksum: 
10c0/861253235576c1c5c1772d23cdce1418c2da2618a479a7de4f6114a12a7ca853011a1e530525d0931c355a8fd237b9cd828fac560f85f9623e24054fd024726f languageName: node linkType: hard @@ -4655,7 +4655,7 @@ __metadata: dependencies: "@typescript-eslint/types": "npm:7.1.0" "@typescript-eslint/visitor-keys": "npm:7.1.0" - checksum: 3fb18de864331739c1b04fe9e3bb5d926e2fdf0d1fea2871181f68d0fb52325cbc9a5b81da58b7fe7f22d6d58d62b21c83460907146bc2f54ef0720fb3f9037f + checksum: 10c0/2fd167730bbe984343ab94739b00bd82e8cdeea9e63674b099cc5c89b420b28dbf79f40dab48022dc717db8d14ae6ee2739e0fcbdcc0321bc9da5f2602b55788 languageName: node linkType: hard @@ -4672,21 +4672,21 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 439e6fadab3df3c21adfd651af4e605e1020c86c8c2400b0127c2ee914646bc73945b4add31ca7201cafeead261ad2958362c339ebdfc0798064d56daeb60661 + checksum: 10c0/3e3eea6c03692a643bf4ed11646b0679c6ff13baf1647d97e793f3d8c3adb83061e27a17c2a1470166a3c6c444b974bebc8096d36e0b4b3c36c289ff38bcfc9b languageName: node linkType: hard "@typescript-eslint/types@npm:5.62.0": version: 5.62.0 resolution: "@typescript-eslint/types@npm:5.62.0" - checksum: 24e8443177be84823242d6729d56af2c4b47bfc664dd411a1d730506abf2150d6c31bdefbbc6d97c8f91043e3a50e0c698239dcb145b79bb6b0c34469aaf6c45 + checksum: 10c0/7febd3a7f0701c0b927e094f02e82d8ee2cada2b186fcb938bc2b94ff6fbad88237afc304cbaf33e82797078bbbb1baf91475f6400912f8b64c89be79bfa4ddf languageName: node linkType: hard "@typescript-eslint/types@npm:7.1.0": version: 7.1.0 resolution: "@typescript-eslint/types@npm:7.1.0" - checksum: 34801a14ea1444a1707de5bd3211f0ea53afc82a3c6c4543092f123267389da607c498d1a7de554ac9f071e6ef488238728a5f279ff2abaa0cbdfaa733899b67 + checksum: 10c0/095cde3e773b7605c5e0c86642002768ced09e94def7f3c6f49a67863f47d7c8ae15413a4ab1a2407f779d1b5ede5fb3000bc98b1cf9ed7ec938acc38cac89e7 languageName: node linkType: hard @@ -4704,7 +4704,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 
06c975eb5f44b43bd19fadc2e1023c50cf87038fe4c0dd989d4331c67b3ff509b17fa60a3251896668ab4d7322bdc56162a9926971218d2e1a1874d2bef9a52e + checksum: 10c0/d7984a3e9d56897b2481940ec803cb8e7ead03df8d9cfd9797350be82ff765dfcf3cfec04e7355e1779e948da8f02bc5e11719d07a596eb1cb995c48a95e38cf languageName: node linkType: hard @@ -4723,7 +4723,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 7dfc6fc70ff00875728ce5d85a3c5d6cb01435082b20ff9301ebe4d8e4a31a0c997282c762c636937bd66a40b4e0154e2ce98f85d888a6c46d433e9a24c46c4c + checksum: 10c0/063845dc8526dfda722d1b00960443a5158d1bce2bc39bf49bd353f33f42aa30116105a87b55a04df3eaef99c0d1c13fb987c53848dff43de6152c66dd3ba41c languageName: node linkType: hard @@ -4740,7 +4740,7 @@ __metadata: semver: "npm:^7.5.4" peerDependencies: eslint: ^8.56.0 - checksum: 26d64094d8b828ce6cfea660c95cdbd4d0193d338646fc773312093388bc781653fc1ca16977b3be5288579fe43f14c7108fc431da66dd95b6ed680ad44712a0 + checksum: 10c0/3fefd51307d0e294462106c57c4b12cd610bfe1bdcc5ca0142bfac6a5d0d37c18d14be5ec89740eb85515f5512f45219a6048df0efccd457e96f9d0612af4abf languageName: node linkType: hard @@ -4758,7 +4758,7 @@ __metadata: semver: "npm:^7.3.7" peerDependencies: eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - checksum: 15ef13e43998a082b15f85db979f8d3ceb1f9ce4467b8016c267b1738d5e7cdb12aa90faf4b4e6dd6486c236cf9d33c463200465cf25ff997dbc0f12358550a1 + checksum: 10c0/f09b7d9952e4a205eb1ced31d7684dd55cee40bf8c2d78e923aa8a255318d97279825733902742c09d8690f37a50243f4c4d383ab16bd7aefaf9c4b438f785e1 languageName: node linkType: hard @@ -4768,7 +4768,7 @@ __metadata: dependencies: "@typescript-eslint/types": "npm:5.62.0" eslint-visitor-keys: "npm:^3.3.0" - checksum: dc613ab7569df9bbe0b2ca677635eb91839dfb2ca2c6fa47870a5da4f160db0b436f7ec0764362e756d4164e9445d49d5eb1ff0b87f4c058946ae9d8c92eb388 + checksum: 10c0/7c3b8e4148e9b94d9b7162a596a1260d7a3efc4e65199693b8025c71c4652b8042501c0bc9f57654c1e2943c26da98c0f77884a746c6ae81389fcb0b513d995d languageName: node linkType: hard @@ 
-4778,14 +4778,14 @@ __metadata: dependencies: "@typescript-eslint/types": "npm:7.1.0" eslint-visitor-keys: "npm:^3.4.1" - checksum: c3e98ebf166fd1854adb0e9599dc108cdbbd95f6eb099d31deae2fd1d4df8fcd8dc9c24ad4f509b961ad900b474c246f6b4b228b5711cc504106c3e0f751a11c + checksum: 10c0/9015a10e6ee2a99fc99e0f7a3f274496a813c2c239e868f29e7c0da919c825fe192fe21d3410c43d8a801e8186b51f08ef06523d2c3010570d893a1486ac293d languageName: node linkType: hard "@ungap/structured-clone@npm:^1.2.0": version: 1.2.0 resolution: "@ungap/structured-clone@npm:1.2.0" - checksum: c6fe89a505e513a7592e1438280db1c075764793a2397877ff1351721fe8792a966a5359769e30242b3cd023f2efb9e63ca2ca88019d73b564488cc20e3eab12 + checksum: 10c0/8209c937cb39119f44eb63cf90c0b73e7c754209a6411c707be08e50e29ee81356dca1a848a405c8bdeebfe2f5e4f831ad310ae1689eeef65e7445c090c6657d languageName: node linkType: hard @@ -4799,7 +4799,7 @@ __metadata: react-refresh: "npm:^0.14.0" peerDependencies: vite: ^4.2.0 - checksum: 9e7378621cb7e4dacd7277cd83b55382febdd3ff4c8a47793895caa8bfe3ce42c3ebe4e4cc49c29b53846d28c2796cf32c5727a3f9e784f7855f4421a80fcf42 + checksum: 10c0/3cf2e044fb4c95dd7b0b3092dcc6c77d6f459ddfae6b1f8ea4ee1d57b33c158072ae9f1067eb1737b6706bad644457f261c70af196f676477fdf3a3ad5653da8 languageName: node linkType: hard @@ -4814,14 +4814,14 @@ __metadata: react-refresh: "npm:^0.14.0" peerDependencies: vite: ^4.1.0-beta.0 - checksum: 54baf15170faed08c5c050ed6ac3b071e743d703f2c26ae685bf362bbaa2d8a733a98af0639f0662d474d95a6d91d008da9de8f3a51cc3e6660c4e642399cf2c + checksum: 10c0/259a92a303cd736240dc0d3282d1261339e7bbcf51c5b326868c910b35d4bd22a360334b2dafa5bfc7f3e935f2cd0fdc7ccb6ec6b519b81017c4c4812cd05290 languageName: node linkType: hard "@xmldom/xmldom@npm:^0.8.10": version: 0.8.10 resolution: "@xmldom/xmldom@npm:0.8.10" - checksum: 62400bc5e0e75b90650e33a5ceeb8d94829dd11f9b260962b71a784cd014ddccec3e603fe788af9c1e839fa4648d8c521ebd80d8b752878d3a40edabc9ce7ccf + checksum: 
10c0/c7647c442502720182b0d65b17d45d2d95317c1c8c497626fe524bda79b4fb768a9aa4fae2da919f308e7abcff7d67c058b102a9d641097e9a57f0b80187851f languageName: node linkType: hard @@ -4832,7 +4832,7 @@ __metadata: tslib: "npm:^2.4.0" peerDependencies: esbuild: ">=0.10.0" - checksum: 454f521088c1fa24fda51f83ca4a50ba6e3bd147e5dee8c899e6bf24a7196186532c3abb18480e83395708ffb7238c9cac5b82595c3985ce93593b5afbd0a9f0 + checksum: 10c0/5095bc316862971add31ca1fadb0095b6ad15f25120f6ab3a06086bb6a7be93c2f3c45bff80d5976689fc89b0e9bf82bd3d410e205c852739874d32d050c4e57 languageName: node linkType: hard @@ -4842,7 +4842,7 @@ __metadata: dependencies: "@yarnpkg/libzip": "npm:^2.3.0" tslib: "npm:^1.13.0" - checksum: 29b38bd2054e3ec14677c16321a20ed69ac41d9d6f2fee7d9d7bc0a5a737e6d94add79cfa5f6ab867b5a98ab6aa2df3b53cb34f81159907cc308576a7bc08c67 + checksum: 10c0/c4fbbed99e801f17c381204e9699d9ea4fb51b14e99968985f477bdbc7b02b61e026860173f3f46bd60d9f46ae6a06f420a3edb3c02c3a45ae83779095928094 languageName: node linkType: hard @@ -4852,21 +4852,21 @@ __metadata: dependencies: "@types/emscripten": "npm:^1.39.6" tslib: "npm:^1.13.0" - checksum: 0eb147f39eab2830c29120d17e8bfba5aa15dedb940a7378070c67d4de08e9ba8d34068522e15e6b4db94ecaed4ad520e1e517588a36a348d1aa160bc36156ea + checksum: 10c0/0c2361ccb002e28463ed98541f3bdaab54f52aad6a2080666c2a9ea605ebd9cdfb7b0340b1db6f105820d05bcb803cdfb3ce755a8f6034657298c291bf884f81 languageName: node linkType: hard "abab@npm:^2.0.6": version: 2.0.6 resolution: "abab@npm:2.0.6" - checksum: ebe95d7278999e605823fc515a3b05d689bc72e7f825536e73c95ebf621636874c6de1b749b3c4bf866b96ccd4b3a2802efa313d0e45ad51a413c8c73247db20 + checksum: 10c0/0b245c3c3ea2598fe0025abf7cc7bb507b06949d51e8edae5d12c1b847a0a0c09639abcb94788332b4e2044ac4491c1e8f571b51c7826fd4b0bda1685ad4a278 languageName: node linkType: hard "abbrev@npm:^2.0.0": version: 2.0.0 resolution: "abbrev@npm:2.0.0" - checksum: 
ca0a54e35bea4ece0ecb68a47b312e1a9a6f772408d5bcb9051230aaa94b0460671c5b5c9cb3240eb5b7bc94c52476550eb221f65a0bbd0145bdc9f3113a6707 + checksum: 10c0/f742a5a107473946f426c691c08daba61a1d15942616f300b5d32fd735be88fef5cba24201757b6c407fd564555fb48c751cfa33519b2605c8a7aadd22baf372 languageName: node linkType: hard @@ -4876,7 +4876,7 @@ __metadata: dependencies: mime-types: "npm:~2.1.34" negotiator: "npm:0.6.3" - checksum: 67eaaa90e2917c58418e7a9b89392002d2b1ccd69bcca4799135d0c632f3b082f23f4ae4ddeedbced5aa59bcc7bdf4699c69ebed4593696c922462b7bc5744d6 + checksum: 10c0/3a35c5f5586cfb9a21163ca47a5f77ac34fa8ceb5d17d2fa2c0d81f41cbd7f8c6fa52c77e2c039acc0f4d09e71abdc51144246900f6bef5e3c4b333f77d89362 languageName: node linkType: hard @@ -4886,7 +4886,7 @@ __metadata: dependencies: acorn: "npm:^8.1.0" acorn-walk: "npm:^8.0.2" - checksum: 2a2998a547af6d0db5f0cdb90acaa7c3cbca6709010e02121fb8b8617c0fbd8bab0b869579903fde358ac78454356a14fadcc1a672ecb97b04b1c2ccba955ce8 + checksum: 10c0/7437f58e92d99292dbebd0e79531af27d706c9f272f31c675d793da6c82d897e75302a8744af13c7f7978a8399840f14a353b60cf21014647f71012982456d2b languageName: node linkType: hard @@ -4895,21 +4895,21 @@ __metadata: resolution: "acorn-jsx@npm:5.3.2" peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - checksum: d4371eaef7995530b5b5ca4183ff6f062ca17901a6d3f673c9ac011b01ede37e7a1f7f61f8f5cfe709e88054757bb8f3277dc4061087cdf4f2a1f90ccbcdb977 + checksum: 10c0/4c54868fbef3b8d58927d5e33f0a4de35f59012fe7b12cf9dfbb345fb8f46607709e1c4431be869a23fb63c151033d84c4198fa9f79385cec34fcb1dd53974c1 languageName: node linkType: hard "acorn-walk@npm:^7.2.0": version: 7.2.0 resolution: "acorn-walk@npm:7.2.0" - checksum: 4d3e186f729474aed3bc3d0df44692f2010c726582655b20a23347bef650867655521c48ada444cb4fda241ee713dcb792da363ec74c6282fa884fb7144171bb + checksum: 10c0/ff99f3406ed8826f7d6ef6ac76b7608f099d45a1ff53229fa267125da1924188dbacf02e7903dfcfd2ae4af46f7be8847dc7d564c73c4e230dfb69c8ea8e6b4c languageName: node linkType: hard 
"acorn-walk@npm:^8.0.2": version: 8.3.2 resolution: "acorn-walk@npm:8.3.2" - checksum: 57dbe2fd8cf744f562431775741c5c087196cd7a65ce4ccb3f3981cdfad25cd24ad2bad404997b88464ac01e789a0a61e5e355b2a84876f13deef39fb39686ca + checksum: 10c0/7e2a8dad5480df7f872569b9dccff2f3da7e65f5353686b1d6032ab9f4ddf6e3a2cb83a9b52cf50b1497fd522154dda92f0abf7153290cc79cd14721ff121e52 languageName: node linkType: hard @@ -4918,7 +4918,7 @@ __metadata: resolution: "acorn@npm:7.4.1" bin: acorn: bin/acorn - checksum: 8be2a40714756d713dfb62544128adce3b7102c6eb94bc312af196c2cc4af76e5b93079bd66b05e9ca31b35a9b0ce12171d16bc55f366cafdb794fdab9d753ec + checksum: 10c0/bd0b2c2b0f334bbee48828ff897c12bd2eb5898d03bf556dcc8942022cec795ac5bb5b6b585e2de687db6231faf07e096b59a361231dd8c9344d5df5f7f0e526 languageName: node linkType: hard @@ -4927,7 +4927,7 @@ __metadata: resolution: "acorn@npm:8.11.3" bin: acorn: bin/acorn - checksum: b688e7e3c64d9bfb17b596e1b35e4da9d50553713b3b3630cf5690f2b023a84eac90c56851e6912b483fe60e8b4ea28b254c07e92f17ef83d72d78745a8352dd + checksum: 10c0/3ff155f8812e4a746fee8ecff1f227d527c4c45655bb1fad6347c3cb58e46190598217551b1500f18542d2bbe5c87120cb6927f5a074a59166fbdd9468f0a299 languageName: node linkType: hard @@ -4981,12 +4981,13 @@ __metadata: eslint-plugin-spellcheck: "npm:0.0.20" eslint-plugin-storybook: "npm:0.8.0" html-react-parser: "npm:5.1.8" - husky: "npm:9.0.11" + husky: "npm:9.1.6" identity-obj-proxy: "npm:3.0.0" jest: "npm:29.7.0" jest-environment-jsdom: "npm:29.7.0" js-base64: "npm:3.7.7" json-schema: "npm:0.4.0" + lint-staged: "npm:15.2.10" prettier: "npm:3.2.5" prop-types: "npm:15.8.1" qs: "npm:6.11.2" @@ -5016,14 +5017,14 @@ __metadata: "address@npm:^1.0.1": version: 1.2.2 resolution: "address@npm:1.2.2" - checksum: 57d80a0c6ccadc8769ad3aeb130c1599e8aee86a8d25f671216c40df9b8489d6c3ef879bc2752b40d1458aa768f947c2d91e5b2fedfe63cf702c40afdfda9ba9 + checksum: 
10c0/1c8056b77fb124456997b78ed682ecc19d2fd7ea8bd5850a2aa8c3e3134c913847c57bcae418622efd32ba858fa1e242a40a251ac31da0515664fc0ac03a047d languageName: node linkType: hard "agent-base@npm:5": version: 5.1.1 resolution: "agent-base@npm:5.1.1" - checksum: 82954db5dccdccccf52c4b7f548394a696accd259d564bfb325fb02586aaaa9df96f5d50bb19134923fe5ff9c21195e7a88871bf4e086cca9014a549a0ba2a5f + checksum: 10c0/3baa3f01072c16e3955ce7802166e576cde9831af82b262aae1c780af49c0c84e82e64ba9ef9e7d1704fe29e9f0096a78a4f998ec137360fee3cb95186f97161 languageName: node linkType: hard @@ -5032,7 +5033,7 @@ __metadata: resolution: "agent-base@npm:6.0.2" dependencies: debug: "npm:4" - checksum: 21fb903e0917e5cb16591b4d0ef6a028a54b83ac30cd1fca58dece3d4e0990512a8723f9f83130d88a41e2af8b1f7be1386fda3ea2d181bb1a62155e75e95e23 + checksum: 10c0/dc4f757e40b5f3e3d674bc9beb4f1048f4ee83af189bae39be99f57bf1f48dde166a8b0a5342a84b5944ee8e6ed1e5a9d801858f4ad44764e84957122fe46261 languageName: node linkType: hard @@ -5041,7 +5042,7 @@ __metadata: resolution: "agent-base@npm:7.1.0" dependencies: debug: "npm:^4.3.4" - checksum: f7828f991470a0cc22cb579c86a18cbae83d8a3cbed39992ab34fc7217c4d126017f1c74d0ab66be87f71455318a8ea3e757d6a37881b8d0f2a2c6aa55e5418f + checksum: 10c0/fc974ab57ffdd8421a2bc339644d312a9cca320c20c3393c9d8b1fd91731b9bbabdb985df5fc860f5b79d81c3e350daa3fcb31c5c07c0bb385aafc817df004ce languageName: node linkType: hard @@ -5051,7 +5052,7 @@ __metadata: dependencies: clean-stack: "npm:^2.0.0" indent-string: "npm:^4.0.0" - checksum: 1101a33f21baa27a2fa8e04b698271e64616b886795fd43c31068c07533c7b3facfcaf4e9e0cab3624bd88f729a592f1c901a1a229c9e490eafce411a8644b79 + checksum: 10c0/a42f67faa79e3e6687a4923050e7c9807db3848a037076f791d10e092677d65c1d2d863b7848560699f40fc0502c19f40963fb1cd1fb3d338a7423df8e45e039 languageName: node linkType: hard @@ -5063,7 +5064,7 @@ __metadata: fast-json-stable-stringify: "npm:^2.0.0" json-schema-traverse: "npm:^0.4.1" uri-js: "npm:^4.2.2" - checksum: 
48d6ad21138d12eb4d16d878d630079a2bda25a04e745c07846a4ad768319533031e28872a9b3c5790fa1ec41aabdf2abed30a56e5a03ebc2cf92184b8ee306c + checksum: 10c0/41e23642cbe545889245b9d2a45854ebba51cda6c778ebced9649420d9205f2efb39cb43dbc41e358409223b1ea43303ae4839db682c848b891e4811da1a5a71 languageName: node linkType: hard @@ -5075,7 +5076,7 @@ __metadata: json-schema-traverse: "npm:^1.0.0" require-from-string: "npm:^2.0.2" uri-js: "npm:^4.2.2" - checksum: b406f3b79b5756ac53bfe2c20852471b08e122bc1ee4cde08ae4d6a800574d9cd78d60c81c69c63ff81e4da7cd0b638fafbb2303ae580d49cf1600b9059efb85 + checksum: 10c0/ac4f72adf727ee425e049bc9d8b31d4a57e1c90da8d28bcd23d60781b12fcd6fc3d68db5df16994c57b78b94eed7988f5a6b482fd376dc5b084125e20a0a622e languageName: node linkType: hard @@ -5084,21 +5085,30 @@ __metadata: resolution: "ansi-escapes@npm:4.3.2" dependencies: type-fest: "npm:^0.21.3" - checksum: 8661034456193ffeda0c15c8c564a9636b0c04094b7f78bd01517929c17c504090a60f7a75f949f5af91289c264d3e1001d91492c1bd58efc8e100500ce04de2 + checksum: 10c0/da917be01871525a3dfcf925ae2977bc59e8c513d4423368645634bf5d4ceba5401574eb705c1e92b79f7292af5a656f78c5725a4b0e1cec97c4b413705c1d50 + languageName: node + linkType: hard + +"ansi-escapes@npm:^7.0.0": + version: 7.0.0 + resolution: "ansi-escapes@npm:7.0.0" + dependencies: + environment: "npm:^1.0.0" + checksum: 10c0/86e51e36fabef18c9c004af0a280573e828900641cea35134a124d2715e0c5a473494ab4ce396614505da77638ae290ff72dd8002d9747d2ee53f5d6bbe336be languageName: node linkType: hard "ansi-regex@npm:^5.0.1": version: 5.0.1 resolution: "ansi-regex@npm:5.0.1" - checksum: 2aa4bb54caf2d622f1afdad09441695af2a83aa3fe8b8afa581d205e57ed4261c183c4d3877cee25794443fde5876417d859c108078ab788d6af7e4fe52eb66b + checksum: 10c0/9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 languageName: node linkType: hard "ansi-regex@npm:^6.0.1": version: 6.0.1 resolution: "ansi-regex@npm:6.0.1" - checksum: 
1ff8b7667cded1de4fa2c9ae283e979fc87036864317da86a2e546725f96406746411d0d85e87a2d12fa5abd715d90006de7fa4fa0477c92321ad3b4c7d4e169 + checksum: 10c0/cbe16dbd2c6b2735d1df7976a7070dd277326434f0212f43abf6d87674095d247968209babdaad31bb00882fa68807256ba9be340eec2f1004de14ca75f52a08 languageName: node linkType: hard @@ -5107,7 +5117,7 @@ __metadata: resolution: "ansi-styles@npm:3.2.1" dependencies: color-convert: "npm:^1.9.0" - checksum: d85ade01c10e5dd77b6c89f34ed7531da5830d2cb5882c645f330079975b716438cd7ebb81d0d6e6b4f9c577f19ae41ab55f07f19786b02f9dfd9e0377395665 + checksum: 10c0/ece5a8ef069fcc5298f67e3f4771a663129abd174ea2dfa87923a2be2abf6cd367ef72ac87942da00ce85bd1d651d4cd8595aebdb1b385889b89b205860e977b languageName: node linkType: hard @@ -5116,21 +5126,21 @@ __metadata: resolution: "ansi-styles@npm:4.3.0" dependencies: color-convert: "npm:^2.0.1" - checksum: b4494dfbfc7e4591b4711a396bd27e540f8153914123dccb4cdbbcb514015ada63a3809f362b9d8d4f6b17a706f1d7bea3c6f974b15fa5ae76b5b502070889ff + checksum: 10c0/895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 languageName: node linkType: hard "ansi-styles@npm:^5.0.0": version: 5.2.0 resolution: "ansi-styles@npm:5.2.0" - checksum: d7f4e97ce0623aea6bc0d90dcd28881ee04cba06c570b97fd3391bd7a268eedfd9d5e2dd4fdcbdd82b8105df5faf6f24aaedc08eaf3da898e702db5948f63469 + checksum: 10c0/9c4ca80eb3c2fb7b33841c210d2f20807f40865d27008d7c3f707b7f95cab7d67462a565e2388ac3285b71cb3d9bb2173de8da37c57692a362885ec34d6e27df languageName: node linkType: hard -"ansi-styles@npm:^6.1.0": +"ansi-styles@npm:^6.0.0, ansi-styles@npm:^6.1.0, ansi-styles@npm:^6.2.1": version: 6.2.1 resolution: "ansi-styles@npm:6.2.1" - checksum: 70fdf883b704d17a5dfc9cde206e698c16bcd74e7f196ab821511651aee4f9f76c9514bdfa6ca3a27b5e49138b89cb222a28caf3afe4567570139577f991df32 + checksum: 
10c0/5d1ec38c123984bcedd996eac680d548f31828bd679a66db2bdf11844634dde55fec3efa9c6bb1d89056a5e79c1ac540c4c784d592ea1d25028a92227d2f2d5c languageName: node linkType: hard @@ -5140,14 +5150,14 @@ __metadata: dependencies: normalize-path: "npm:^3.0.0" picomatch: "npm:^2.0.4" - checksum: 3e044fd6d1d26545f235a9fe4d7a534e2029d8e59fa7fd9f2a6eb21230f6b5380ea1eaf55136e60cbf8e613544b3b766e7a6fa2102e2a3a117505466e3025dc2 + checksum: 10c0/57b06ae984bc32a0d22592c87384cd88fe4511b1dd7581497831c56d41939c8a001b28e7b853e1450f2bf61992dfcaa8ae2d0d161a0a90c4fb631ef07098fbac languageName: node linkType: hard "app-root-dir@npm:^1.0.2": version: 1.0.2 resolution: "app-root-dir@npm:1.0.2" - checksum: d4b1653fc60b6465b982bf5a88b12051ed2d807d70609386a809306e1c636496f53522d61fa30f9f98c71aaae34f34e1651889cf17d81a44e3dafd2859d495ad + checksum: 10c0/0225e4be7788968a82bb76df9b14b0d7f212a5c12e8c625cdc34f80548780bcbfc5f3287d0806dddd83bf9dbf9ce302e76b2887cd3a6f4be52b79df7f3aa9e7c languageName: node linkType: hard @@ -5156,14 +5166,14 @@ __metadata: resolution: "argparse@npm:1.0.10" dependencies: sprintf-js: "npm:~1.0.2" - checksum: c6a621343a553ff3779390bb5ee9c2263d6643ebcd7843227bdde6cc7adbed796eb5540ca98db19e3fd7b4714e1faa51551f8849b268bb62df27ddb15cbcd91e + checksum: 10c0/b2972c5c23c63df66bca144dbc65d180efa74f25f8fd9b7d9a0a6c88ae839db32df3d54770dcb6460cf840d232b60695d1a6b1053f599d84e73f7437087712de languageName: node linkType: hard "argparse@npm:^2.0.1": version: 2.0.1 resolution: "argparse@npm:2.0.1" - checksum: 18640244e641a417ec75a9bd38b0b2b6b95af5199aa241b131d4b2fb206f334d7ecc600bd194861610a5579084978bfcbb02baa399dbe442d56d0ae5e60dbaef + checksum: 10c0/c5640c2d89045371c7cedd6a70212a04e360fd34d6edeae32f6952c63949e3525ea77dbec0289d8213a99bbaeab5abfa860b5c12cf88a2e6cf8106e90dd27a7e languageName: node linkType: hard @@ -5172,7 +5182,7 @@ __metadata: resolution: "aria-hidden@npm:1.2.3" dependencies: tslib: "npm:^2.0.0" - checksum: 
cd7f8474f1bef2dadce8fc74ef6d0fa8c9a477ee3c9e49fc3698e5e93a62014140c520266ee28969d63b5ab474144fe48b6182d010feb6a223f7a73928e6660a + checksum: 10c0/46b07b7273167ad3fc2625f1ecbb43f8e6f73115c66785cbb5dcf1e2508133a43b6419d610c39676ceaeb563239efbd8974d5c0187695db8b3e8c3e11f549c2d languageName: node linkType: hard @@ -5181,7 +5191,7 @@ __metadata: resolution: "aria-query@npm:5.1.3" dependencies: deep-equal: "npm:^2.0.5" - checksum: e5da608a7c4954bfece2d879342b6c218b6b207e2d9e5af270b5e38ef8418f02d122afdc948b68e32649b849a38377785252059090d66fa8081da95d1609c0d2 + checksum: 10c0/edcbc8044c4663d6f88f785e983e6784f98cb62b4ba1e9dd8d61b725d0203e4cfca38d676aee984c31f354103461102a3d583aa4fbe4fd0a89b679744f4e5faf languageName: node linkType: hard @@ -5190,7 +5200,7 @@ __metadata: resolution: "aria-query@npm:5.3.0" dependencies: dequal: "npm:^2.0.3" - checksum: c3e1ed127cc6886fea4732e97dd6d3c3938e64180803acfb9df8955517c4943760746ffaf4020ce8f7ffaa7556a3b5f85c3769a1f5ca74a1288e02d042f9ae4e + checksum: 10c0/2bff0d4eba5852a9dd578ecf47eaef0e82cc52569b48469b0aac2db5145db0b17b7a58d9e01237706d1e14b7a1b0ac9b78e9c97027ad97679dd8f91b85da1469 languageName: node linkType: hard @@ -5200,14 +5210,14 @@ __metadata: dependencies: call-bind: "npm:^1.0.5" is-array-buffer: "npm:^3.0.4" - checksum: 53524e08f40867f6a9f35318fafe467c32e45e9c682ba67b11943e167344d2febc0f6977a17e699b05699e805c3e8f073d876f8bbf1b559ed494ad2cd0fae09e + checksum: 10c0/f5cdf54527cd18a3d2852ddf73df79efec03829e7373a8322ef5df2b4ef546fb365c19c71d6b42d641cb6bfe0f1a2f19bc0ece5b533295f86d7c3d522f228917 languageName: node linkType: hard "array-flatten@npm:1.1.1": version: 1.1.1 resolution: "array-flatten@npm:1.1.1" - checksum: e13c9d247241be82f8b4ec71d035ed7204baa82fae820d4db6948d30d3c4a9f2b3905eb2eec2b937d4aa3565200bd3a1c500480114cff649fa748747d2a50feb + checksum: 10c0/806966c8abb2f858b08f5324d9d18d7737480610f3bd5d3498aaae6eb5efdc501a884ba019c9b4a8f02ff67002058749d05548fd42fa8643f02c9c7f22198b91 languageName: node linkType: hard @@ 
-5220,14 +5230,14 @@ __metadata: es-abstract: "npm:^1.22.1" get-intrinsic: "npm:^1.2.1" is-string: "npm:^1.0.7" - checksum: 856a8be5d118967665936ad33ff3b07adfc50b06753e596e91fb80c3da9b8c022e92e3cc6781156d6ad95db7109b9f603682c7df2d6a529ed01f7f6b39a4a360 + checksum: 10c0/692907bd7f19d06dc58ccb761f34b58f5dc0b437d2b47a8fe42a1501849a5cf5c27aed3d521a9702667827c2c85a7e75df00a402c438094d87fc43f39ebf9b2b languageName: node linkType: hard "array-union@npm:^2.1.0": version: 2.1.0 resolution: "array-union@npm:2.1.0" - checksum: 5bee12395cba82da674931df6d0fea23c4aa4660cb3b338ced9f828782a65caa232573e6bf3968f23e0c5eb301764a382cef2f128b170a9dc59de0e36c39f98d + checksum: 10c0/429897e68110374f39b771ec47a7161fc6a8fc33e196857c0a396dc75df0b5f65e4d046674db764330b6bb66b39ef48dd7c53b6a2ee75cfb0681e0c1a7033962 languageName: node linkType: hard @@ -5240,7 +5250,7 @@ __metadata: es-abstract: "npm:^1.22.1" es-array-method-boxes-properly: "npm:^1.0.0" is-string: "npm:^1.0.7" - checksum: 3da2189afb00f95559cc73fc3c50f17a071a65bb705c0b2f2e2a2b2142781215b622442368c8b4387389b6ab251adf09ad347f9a8a4cf29d24404cc5ea1e295c + checksum: 10c0/8b70b5f866df5d90fa27aa5bfa30f5fefc44cbea94b0513699d761713658077c2a24cbf06aac5179eabddb6c93adc467af4c288b7a839c5bc5a769ee5a2d48ad languageName: node linkType: hard @@ -5253,7 +5263,7 @@ __metadata: es-abstract: "npm:^1.22.3" es-errors: "npm:^1.3.0" es-shim-unscopables: "npm:^1.0.2" - checksum: 12d7de8da619065b9d4c40550d11c13f2fbbc863c4270ef01d022f49ef16fbe9022441ee9d60b1e952853c661dd4b3e05c21e4348d4631c6d93ddf802a252296 + checksum: 10c0/b23ae35cf7621c82c20981ee110626090734a264798e781b052e534e3d61d576f03d125d92cf2e3672062bb5cc5907e02e69f2d80196a55f3cdb0197b4aa8c64 languageName: node linkType: hard @@ -5265,7 +5275,7 @@ __metadata: define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" es-shim-unscopables: "npm:^1.0.0" - checksum: d9d2f6f27584de92ec7995bc931103e6de722cd2498bdbfc4cba814fc3e52f056050a93be883018811f7c0a35875f5056584a0e940603a5e5934f0279896aebe + 
checksum: 10c0/a578ed836a786efbb6c2db0899ae80781b476200617f65a44846cb1ed8bd8b24c8821b83703375d8af639c689497b7b07277060024b9919db94ac3e10dc8a49b languageName: node linkType: hard @@ -5277,7 +5287,7 @@ __metadata: define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" es-shim-unscopables: "npm:^1.0.0" - checksum: 33f20006686e0cbe844fde7fd290971e8366c6c5e3380681c2df15738b1df766dd02c7784034aeeb3b037f65c496ee54de665388288edb323a2008bb550f77ea + checksum: 10c0/67b3f1d602bb73713265145853128b1ad77cc0f9b833c7e1e056b323fbeac41a4ff1c9c99c7b9445903caea924d9ca2450578d9011913191aa88cc3c3a4b54f4 languageName: node linkType: hard @@ -5290,7 +5300,7 @@ __metadata: es-abstract: "npm:^1.22.3" es-errors: "npm:^1.1.0" es-shim-unscopables: "npm:^1.0.2" - checksum: 9a5b7909a9ddd02a5f5489911766c314a11fb40f8f5106bdbedf6c21898763faeb78ba3af53f7038f288de9161d2605ad10d8b720e07f71a7ed1de49f39c0897 + checksum: 10c0/a27e1ca51168ecacf6042901f5ef021e43c8fa04b6c6b6f2a30bac3645cd2b519cecbe0bc45db1b85b843f64dc3207f0268f700b4b9fbdec076d12d432cf0865 languageName: node linkType: hard @@ -5306,7 +5316,7 @@ __metadata: get-intrinsic: "npm:^1.2.3" is-array-buffer: "npm:^3.0.4" is-shared-array-buffer: "npm:^1.0.2" - checksum: 0221f16c1e3ec7b67da870ee0e1f12b825b5f9189835392b59a22990f715827561a4f4cd5330dc7507de272d8df821be6cd4b0cb569babf5ea4be70e365a2f3d + checksum: 10c0/d32754045bcb2294ade881d45140a5e52bda2321b9e98fa514797b7f0d252c4c5ab0d1edb34112652c62fa6a9398def568da63a4d7544672229afea283358c36 languageName: node linkType: hard @@ -5319,14 +5329,14 @@ __metadata: object-is: "npm:^1.1.5" object.assign: "npm:^4.1.4" util: "npm:^0.12.5" - checksum: 6b9d813c8eef1c0ac13feac5553972e4bd180ae16000d4eb5c0ded2489188737c75a5aacefc97a985008b37502f62fe1bad34da1a7481a54bbfabec3964c8aa7 + checksum: 10c0/7271a5da883c256a1fa690677bf1dd9d6aa882139f2bed1cd15da4f9e7459683e1da8e32a203d6cc6767e5e0f730c77a9532a87b896b4b0af0dd535f668775f0 languageName: node linkType: hard "ast-types-flow@npm:^0.0.8": version: 0.0.8 
resolution: "ast-types-flow@npm:0.0.8" - checksum: 85a1c24af4707871c27cfe456bd2ff7fcbe678f3d1c878ac968c9557735a171a17bdcc8c8f903ceab3fc3c49d5b3da2194e6ab0a6be7fec0e133fa028f21ba1b + checksum: 10c0/f2a0ba8055353b743c41431974521e5e852a9824870cd6fce2db0e538ac7bf4da406bbd018d109af29ff3f8f0993f6a730c9eddbd0abd031fbcb29ca75c1014e languageName: node linkType: hard @@ -5335,21 +5345,21 @@ __metadata: resolution: "ast-types@npm:0.16.1" dependencies: tslib: "npm:^2.0.1" - checksum: f569b475eb1c8cb93888cb6e7b7e36dc43fa19a77e4eb132cbff6e3eb1598ca60f850db6e60b070e5a0ee8c1559fca921dac0916e576f2f104e198793b0bdd8d + checksum: 10c0/abcc49e42eb921a7ebc013d5bec1154651fb6dbc3f497541d488859e681256901b2990b954d530ba0da4d0851271d484f7057d5eff5e07cb73e8b10909f711bf languageName: node linkType: hard "async-limiter@npm:~1.0.0": version: 1.0.1 resolution: "async-limiter@npm:1.0.1" - checksum: 2b849695b465d93ad44c116220dee29a5aeb63adac16c1088983c339b0de57d76e82533e8e364a93a9f997f28bbfc6a92948cefc120652bd07f3b59f8d75cf2b + checksum: 10c0/0693d378cfe86842a70d4c849595a0bb50dc44c11649640ca982fa90cbfc74e3cc4753b5a0847e51933f2e9c65ce8e05576e75e5e1fd963a086e673735b35969 languageName: node linkType: hard "async@npm:^3.2.3, async@npm:^3.2.5": version: 3.2.5 resolution: "async@npm:3.2.5" - checksum: 323c3615c3f0ab1ac25a6f953296bc0ac3213d5e0f1c0debdb12964e55963af288d570293c11e44f7967af58c06d2a88d0ea588c86ec0fbf62fa98037f604a0f + checksum: 10c0/1408287b26c6db67d45cb346e34892cee555b8b59e6c68e6f8c3e495cad5ca13b4f218180e871f3c2ca30df4ab52693b66f2f6ff43644760cab0b2198bda79c1 languageName: node linkType: hard @@ -5358,14 +5368,14 @@ __metadata: resolution: "asynciterator.prototype@npm:1.0.0" dependencies: has-symbols: "npm:^1.0.3" - checksum: e8ebfd9493ac651cf9b4165e9d64030b3da1d17181bb1963627b59e240cdaf021d9b59d44b827dc1dde4e22387ec04c2d0f8720cf58a1c282e34e40cc12721b3 + checksum: 
10c0/fb76850e57d931ff59fd16b6cddb79b0d34fe45f400b2c3480d38892e72cd089787401687dbdb7cdb14ece402c275d3e02a648760d1489cd493527129c4c6204 languageName: node linkType: hard "asynckit@npm:^0.4.0": version: 0.4.0 resolution: "asynckit@npm:0.4.0" - checksum: 3ce727cbc78f69d6a4722517a58ee926c8c21083633b1d3fdf66fd688f6c127a53a592141bd4866f9b63240a86e9d8e974b13919450bd17fa33c2d22c4558ad8 + checksum: 10c0/d73e2ddf20c4eb9337e1b3df1a0f6159481050a5de457c55b14ea2e5cb6d90bb69e004c9af54737a5ee0917fcf2c9e25de67777bbe58261847846066ba75bc9d languageName: node linkType: hard @@ -5374,14 +5384,14 @@ __metadata: resolution: "available-typed-arrays@npm:1.0.7" dependencies: possible-typed-array-names: "npm:^1.0.0" - checksum: 6c9da3a66caddd83c875010a1ca8ef11eac02ba15fb592dc9418b2b5e7b77b645fa7729380a92d9835c2f05f2ca1b6251f39b993e0feb3f1517c74fa1af02cab + checksum: 10c0/d07226ef4f87daa01bd0fe80f8f310982e345f372926da2e5296aecc25c41cab440916bbaa4c5e1034b453af3392f67df5961124e4b586df1e99793a1374bdb2 languageName: node linkType: hard "axe-core@npm:=4.7.0": version: 4.7.0 resolution: "axe-core@npm:4.7.0" - checksum: 615c0f7722c3c9fcf353dbd70b00e2ceae234d4c17cbc839dd85c01d16797c4e4da45f8d27c6118e9e6b033fb06efd196106e13651a1b2f3a10e0f11c7b2f660 + checksum: 10c0/89ac5712b5932ac7d23398b4cb5ba081c394a086e343acc68ba49c83472706e18e0799804e8388c779dcdacc465377deb29f2714241d3fbb389cf3a6b275c9ba languageName: node linkType: hard @@ -5392,7 +5402,7 @@ __metadata: follow-redirects: "npm:^1.15.4" form-data: "npm:^4.0.0" proxy-from-env: "npm:^1.1.0" - checksum: a1932b089ece759cd261f175d9ebf4d41c8994cf0c0767cda86055c7a19bcfdade8ae3464bf4cec4c8b142f4a657dc664fb77a41855e8376cf38b86d7a86518f + checksum: 10c0/131bf8e62eee48ca4bd84e6101f211961bf6a21a33b95e5dfb3983d5a2fe50d9fffde0b57668d7ce6f65063d3dc10f2212cbcb554f75cfca99da1c73b210358d languageName: node linkType: hard @@ -5401,7 +5411,7 @@ __metadata: resolution: "axobject-query@npm:3.2.1" dependencies: dequal: "npm:^2.0.3" - checksum: 
675af2548ed4ece75ad6d50cc0473cfdec7579eac77ec9861e7088d03ffb171aa697b70d2877423bee2ce16460ef62c698c6442a105612cc015719e8ea06b0bd + checksum: 10c0/f7debc2012e456139b57d888c223f6d3cb4b61eb104164a85e3d346273dd6ef0bc9a04b6660ca9407704a14a8e05fa6b6eb9d55f44f348c7210de7ffb350c3a7 languageName: node linkType: hard @@ -5410,7 +5420,7 @@ __metadata: resolution: "babel-core@npm:7.0.0-bridge.0" peerDependencies: "@babel/core": ^7.0.0-0 - checksum: 2a1cb879019dffb08d17bec36e13c3a6d74c94773f41c1fd8b14de13f149cc34b705b0a1e07b42fcf35917b49d78db6ff0c5c3b00b202a5235013d517b5c6bbb + checksum: 10c0/f57576e30267be4607d163b7288031d332cf9200ea35efe9fb33c97f834e304376774c28c1f9d6928d6733fcde7041e4010f1248a0519e7730c590d4b07b9608 languageName: node linkType: hard @@ -5427,7 +5437,7 @@ __metadata: slash: "npm:^3.0.0" peerDependencies: "@babel/core": ^7.8.0 - checksum: 8a0953bd813b3a8926008f7351611055548869e9a53dd36d6e7e96679001f71e65fd7dbfe253265c3ba6a4e630dc7c845cf3e78b17d758ef1880313ce8fba258 + checksum: 10c0/2eda9c1391e51936ca573dd1aedfee07b14c59b33dbe16ef347873ddd777bcf6e2fc739681e9e9661ab54ef84a3109a03725be2ac32cd2124c07ea4401cbe8c1 languageName: node linkType: hard @@ -5440,7 +5450,7 @@ __metadata: "@istanbuljs/schema": "npm:^0.1.2" istanbul-lib-instrument: "npm:^5.0.4" test-exclude: "npm:^6.0.0" - checksum: ffd436bb2a77bbe1942a33245d770506ab2262d9c1b3c1f1da7f0592f78ee7445a95bc2efafe619dd9c1b6ee52c10033d6c7d29ddefe6f5383568e60f31dfe8d + checksum: 10c0/1075657feb705e00fd9463b329921856d3775d9867c5054b449317d39153f8fbcebd3e02ebf00432824e647faff3683a9ca0a941325ef1afe9b3c4dd51b24beb languageName: node linkType: hard @@ -5452,7 +5462,7 @@ __metadata: "@babel/types": "npm:^7.3.3" "@types/babel__core": "npm:^7.1.14" "@types/babel__traverse": "npm:^7.0.6" - checksum: 9bfa86ec4170bd805ab8ca5001ae50d8afcb30554d236ba4a7ffc156c1a92452e220e4acbd98daefc12bf0216fccd092d0a2efed49e7e384ec59e0597a926d65 + checksum: 
10c0/7e6451caaf7dce33d010b8aafb970e62f1b0c0b57f4978c37b0d457bbcf0874d75a395a102daf0bae0bd14eafb9f6e9a165ee5e899c0a4f1f3bb2e07b304ed2e languageName: node linkType: hard @@ -5465,7 +5475,7 @@ __metadata: semver: "npm:^6.3.1" peerDependencies: "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: 6b5a79bdc1c43edf857fd3a82966b3c7ff4a90eee00ca8d663e0a98304d6e285a05759d64a4dbc16e04a2a5ea1f248673d8bf789711be5e694e368f19884887c + checksum: 10c0/843e7528de0e03a31a6f3837896a95f75b0b24b0294a077246282372279e974400b0bdd82399e8f9cbfe42c87ed56540fd71c33eafb7c8e8b9adac546ecc5fe5 languageName: node linkType: hard @@ -5477,7 +5487,7 @@ __metadata: core-js-compat: "npm:^3.34.0" peerDependencies: "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: efdf9ba82e7848a2c66e0522adf10ac1646b16f271a9006b61a22f976b849de22a07c54c8826887114842ccd20cc9a4617b61e8e0789227a74378ab508e715cd + checksum: 10c0/b857010736c5e42e20b683973dae862448a42082fcc95b3ef188305a6864a4f94b5cbd568e49e4cd7172c6b2eace7bc403c3ba0984fbe5479474ade01126d559 languageName: node linkType: hard @@ -5488,7 +5498,7 @@ __metadata: "@babel/helper-define-polyfill-provider": "npm:^0.5.0" peerDependencies: "@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0 - checksum: 3a9b4828673b23cd648dcfb571eadcd9d3fadfca0361d0a7c6feeb5a30474e92faaa49f067a6e1c05e49b6a09812879992028ff3ef3446229ff132d6e1de7eb6 + checksum: 10c0/2aab692582082d54e0df9f9373dca1b223e65b4e7e96440160f27ed8803d417a1fa08da550f08aa3820d2010329ca91b68e2b6e9bd7aed51c93d46dfe79629bb languageName: node linkType: hard @@ -5510,7 +5520,7 @@ __metadata: "@babel/plugin-syntax-top-level-await": "npm:^7.8.3" peerDependencies: "@babel/core": ^7.0.0 - checksum: 94561959cb12bfa80867c9eeeace7c3d48d61707d33e55b4c3fdbe82fc745913eb2dbfafca62aef297421b38aadcb58550e5943f50fbcebbeefd70ce2bed4b74 + checksum: 10c0/5ba39a3a0e6c37d25e56a4fb843be632dac98d54706d8a0933f9bcb1a07987a96d55c2b5a6c11788a74063fb2534fe68c1f1dbb6c93626850c785e0938495627 languageName: node linkType: hard @@ -5522,21 +5532,21 @@ __metadata: 
babel-preset-current-node-syntax: "npm:^1.0.0" peerDependencies: "@babel/core": ^7.0.0 - checksum: aa4ff2a8a728d9d698ed521e3461a109a1e66202b13d3494e41eea30729a5e7cc03b3a2d56c594423a135429c37bf63a9fa8b0b9ce275298be3095a88c69f6fb + checksum: 10c0/ec5fd0276b5630b05f0c14bb97cc3815c6b31600c683ebb51372e54dcb776cff790bdeeabd5b8d01ede375a040337ccbf6a3ccd68d3a34219125945e167ad943 languageName: node linkType: hard "balanced-match@npm:^1.0.0": version: 1.0.2 resolution: "balanced-match@npm:1.0.2" - checksum: 9706c088a283058a8a99e0bf91b0a2f75497f185980d9ffa8b304de1d9e58ebda7c72c07ebf01dadedaac5b2907b2c6f566f660d62bd336c3468e960403b9d65 + checksum: 10c0/9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee languageName: node linkType: hard "base64-js@npm:^1.3.1": version: 1.5.1 resolution: "base64-js@npm:1.5.1" - checksum: 669632eb3745404c2f822a18fc3a0122d2f9a7a13f7fb8b5823ee19d1d2ff9ee5b52c53367176ea4ad093c332fd5ab4bd0ebae5a8e27917a4105a4cfc86b1005 + checksum: 10c0/f23823513b63173a001030fae4f2dabe283b99a9d324ade3ad3d148e218134676f1ee8568c877cd79ec1c53158dcf2d2ba527a97c606618928ba99dd930102bf languageName: node linkType: hard @@ -5545,21 +5555,21 @@ __metadata: resolution: "better-opn@npm:3.0.2" dependencies: open: "npm:^8.0.4" - checksum: 24668e5a837d0d2c0edf17ad5ebcfeb00a8a5578a5eb09f7a409e1a60617cdfea40b8ebfc95e5f12d9568157930d033e6805788fcf0780413ac982c95d3745d1 + checksum: 10c0/911ef25d44da75aabfd2444ce7a4294a8000ebcac73068c04a60298b0f7c7506b60421aa4cd02ac82502fb42baaff7e4892234b51e6923eded44c5a11185f2f5 languageName: node linkType: hard "big-integer@npm:^1.6.44": version: 1.6.52 resolution: "big-integer@npm:1.6.52" - checksum: 4bc6ae152a96edc9f95020f5fc66b13d26a9ad9a021225a9f0213f7e3dc44269f423aa8c42e19d6ac4a63bb2b22140b95d10be8f9ca7a6d9aa1b22b330d1f514 + checksum: 10c0/9604224b4c2ab3c43c075d92da15863077a9f59e5d4205f4e7e76acd0cd47e8d469ec5e5dba8d9b32aa233951893b29329ca56ac80c20ce094b4a647a66abae0 
languageName: node linkType: hard "binary-extensions@npm:^2.0.0": version: 2.2.0 resolution: "binary-extensions@npm:2.2.0" - checksum: ccd267956c58d2315f5d3ea6757cf09863c5fc703e50fbeb13a7dc849b812ef76e3cf9ca8f35a0c48498776a7478d7b4a0418e1e2b8cb9cb9731f2922aaad7f8 + checksum: 10c0/d73d8b897238a2d3ffa5f59c0241870043aa7471335e89ea5e1ff48edb7c2d0bb471517a3e4c5c3f4c043615caa2717b5f80a5e61e07503d51dc85cb848e665d languageName: node linkType: hard @@ -5570,7 +5580,7 @@ __metadata: buffer: "npm:^5.5.0" inherits: "npm:^2.0.4" readable-stream: "npm:^3.4.0" - checksum: b7904e66ed0bdfc813c06ea6c3e35eafecb104369dbf5356d0f416af90c1546de3b74e5b63506f0629acf5e16a6f87c3798f16233dcff086e9129383aa02ab55 + checksum: 10c0/02847e1d2cb089c9dc6958add42e3cdeaf07d13f575973963335ac0fdece563a50ac770ac4c8fa06492d2dd276f6cc3b7f08c7cd9c7a7ad0f8d388b2a28def5f languageName: node linkType: hard @@ -5590,14 +5600,14 @@ __metadata: raw-body: "npm:2.5.1" type-is: "npm:~1.6.18" unpipe: "npm:1.0.0" - checksum: 5f8d128022a2fb8b6e7990d30878a0182f300b70e46b3f9d358a9433ad6275f0de46add6d63206da3637c01c3b38b6111a7480f7e7ac2e9f7b989f6133fe5510 + checksum: 10c0/a202d493e2c10a33fb7413dac7d2f713be579c4b88343cd814b6df7a38e5af1901fc31044e04de176db56b16d9772aa25a7723f64478c20f4d91b1ac223bf3b8 languageName: node linkType: hard "boolbase@npm:^1.0.0": version: 1.0.0 resolution: "boolbase@npm:1.0.0" - checksum: 3e25c80ef626c3a3487c73dbfc70ac322ec830666c9ad915d11b701142fab25ec1e63eff2c450c74347acfd2de854ccde865cd79ef4db1683f7c7b046ea43bb0 + checksum: 10c0/e4b53deb4f2b85c52be0e21a273f2045c7b6a6ea002b0e139c744cb6f95e9ec044439a52883b0d74dedd1ff3da55ed140cfdddfed7fb0cccbed373de5dce1bcf languageName: node linkType: hard @@ -5606,7 +5616,7 @@ __metadata: resolution: "bplist-parser@npm:0.2.0" dependencies: big-integer: "npm:^1.6.44" - checksum: 15d31c1b0c7e0fb384e96349453879a33609d92d91b55a9ccee04b4be4b0645f1c823253d73326a1a23104521fbc45c2dd97fb05adf61863841b68cbb2ca7a3d + checksum: 
10c0/ce79c69e0f6efe506281e7c84e3712f7d12978991675b6e3a58a295b16f13ca81aa9b845c335614a545e0af728c8311b6aa3142af76ba1cb616af9bbac5c4a9f languageName: node linkType: hard @@ -5616,7 +5626,7 @@ __metadata: dependencies: balanced-match: "npm:^1.0.0" concat-map: "npm:0.0.1" - checksum: faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07 + checksum: 10c0/695a56cd058096a7cb71fb09d9d6a7070113c7be516699ed361317aca2ec169f618e28b8af352e02ab4233fb54eb0168460a40dc320bab0034b36ab59aaad668 languageName: node linkType: hard @@ -5625,7 +5635,7 @@ __metadata: resolution: "brace-expansion@npm:2.0.1" dependencies: balanced-match: "npm:^1.0.0" - checksum: a61e7cd2e8a8505e9f0036b3b6108ba5e926b4b55089eeb5550cd04a471fe216c96d4fe7e4c7f995c728c554ae20ddfc4244cad10aef255e72b62930afd233d1 + checksum: 10c0/b358f2fe060e2d7a87aa015979ecea07f3c37d4018f8d6deb5bd4c229ad3a0384fe6029bb76cd8be63c81e516ee52d1a0673edbe2023d53a5191732ae3c3e49f languageName: node linkType: hard @@ -5634,14 +5644,23 @@ __metadata: resolution: "braces@npm:3.0.2" dependencies: fill-range: "npm:^7.0.1" - checksum: 966b1fb48d193b9d155f810e5efd1790962f2c4e0829f8440b8ad236ba009222c501f70185ef732fef17a4c490bb33a03b90dab0631feafbdf447da91e8165b1 + checksum: 10c0/321b4d675791479293264019156ca322163f02dc06e3c4cab33bb15cd43d80b51efef69b0930cfde3acd63d126ebca24cd0544fa6f261e093a0fb41ab9dda381 + languageName: node + linkType: hard + +"braces@npm:^3.0.3": + version: 3.0.3 + resolution: "braces@npm:3.0.3" + dependencies: + fill-range: "npm:^7.1.1" + checksum: 10c0/7c6dfd30c338d2997ba77500539227b9d1f85e388a5f43220865201e407e076783d0881f2d297b9f80951b4c957fcf0b51c1d2d24227631643c3f7c284b0aa04 languageName: node linkType: hard "browser-assert@npm:^1.2.1": version: 1.2.1 resolution: "browser-assert@npm:1.2.1" - checksum: 8b2407cd04c1ed592cf892dec35942b7d72635829221e0788c9a16c4d2afa8b7156bc9705b1c4b32c30d88136c576fda3cbcb8f494d6f865264c706ea8798d92 + checksum: 
10c0/902abf999f92c9c951fdb6d7352c09eea9a84706258699655f7e7906e42daa06a1ae286398a755872740e05a6a71c43c5d1a0c0431d67a8cdb66e5d859a3fc0c languageName: node linkType: hard @@ -5650,7 +5669,7 @@ __metadata: resolution: "browserify-zlib@npm:0.1.4" dependencies: pako: "npm:~0.2.0" - checksum: cd506a1ef9c3280f6537a17ed1352ef7738b66fef0a15a655dc3a43edc34be6ee78c5838427146ae1fcd4801fc06d2ab203614d0f8c4df8b5a091cf0134b9a80 + checksum: 10c0/0cde7ca5d33d43125649330fd75c056397e53731956a2593c4a2529f4e609a8e6abdb2b8e1921683abf5645375b92cfb2a21baa42fe3c9fc3e2556d32043af93 languageName: node linkType: hard @@ -5664,7 +5683,7 @@ __metadata: update-browserslist-db: "npm:^1.0.13" bin: browserslist: cli.js - checksum: 496c3862df74565dd942b4ae65f502c575cbeba1fa4a3894dad7aa3b16130dc3033bc502d8848147f7b625154a284708253d9598bcdbef5a1e34cf11dc7bad8e + checksum: 10c0/8e9cc154529062128d02a7af4d8adeead83ca1df8cd9ee65a88e2161039f3d68a4d40fea7353cab6bae4c16182dec2fdd9a1cf7dc2a2935498cee1af0e998943 languageName: node linkType: hard @@ -5673,21 +5692,21 @@ __metadata: resolution: "bser@npm:2.1.1" dependencies: node-int64: "npm:^0.4.0" - checksum: edba1b65bae682450be4117b695997972bd9a3c4dfee029cab5bcb72ae5393a79a8f909b8bc77957eb0deec1c7168670f18f4d5c556f46cdd3bca5f3b3a8d020 + checksum: 10c0/24d8dfb7b6d457d73f32744e678a60cc553e4ec0e9e1a01cf614b44d85c3c87e188d3cc78ef0442ce5032ee6818de20a0162ba1074725c0d08908f62ea979227 languageName: node linkType: hard "buffer-crc32@npm:~0.2.3": version: 0.2.13 resolution: "buffer-crc32@npm:0.2.13" - checksum: 06252347ae6daca3453b94e4b2f1d3754a3b146a111d81c68924c22d91889a40623264e95e67955b1cb4a68cbedf317abeabb5140a9766ed248973096db5ce1c + checksum: 10c0/cb0a8ddf5cf4f766466db63279e47761eb825693eeba6a5a95ee4ec8cb8f81ede70aa7f9d8aeec083e781d47154290eb5d4d26b3f7a465ec57fb9e7d59c47150 languageName: node linkType: hard "buffer-from@npm:^1.0.0": version: 1.1.2 resolution: "buffer-from@npm:1.1.2" - checksum: 
0448524a562b37d4d7ed9efd91685a5b77a50672c556ea254ac9a6d30e3403a517d8981f10e565db24e8339413b43c97ca2951f10e399c6125a0d8911f5679bb + checksum: 10c0/124fff9d66d691a86d3b062eff4663fe437a9d9ee4b47b1b9e97f5a5d14f6d5399345db80f796827be7c95e70a8e765dd404b7c3ff3b3324f98e9b0c8826cc34 languageName: node linkType: hard @@ -5697,21 +5716,21 @@ __metadata: dependencies: base64-js: "npm:^1.3.1" ieee754: "npm:^1.1.13" - checksum: 997434d3c6e3b39e0be479a80288875f71cd1c07d75a3855e6f08ef848a3c966023f79534e22e415ff3a5112708ce06127277ab20e527146d55c84566405c7c6 + checksum: 10c0/27cac81cff434ed2876058d72e7c4789d11ff1120ef32c9de48f59eab58179b66710c488987d295ae89a228f835fc66d088652dffeb8e3ba8659f80eb091d55e languageName: node linkType: hard "bytes@npm:3.0.0": version: 3.0.0 resolution: "bytes@npm:3.0.0" - checksum: a2b386dd8188849a5325f58eef69c3b73c51801c08ffc6963eddc9be244089ba32d19347caf6d145c86f315ae1b1fc7061a32b0c1aa6379e6a719090287ed101 + checksum: 10c0/91d42c38601c76460519ffef88371caacaea483a354c8e4b8808e7b027574436a5713337c003ea3de63ee4991c2a9a637884fdfe7f761760d746929d9e8fec60 languageName: node linkType: hard "bytes@npm:3.1.2": version: 3.1.2 resolution: "bytes@npm:3.1.2" - checksum: a10abf2ba70c784471d6b4f58778c0beeb2b5d405148e66affa91f23a9f13d07603d0a0354667310ae1d6dc141474ffd44e2a074be0f6e2254edb8fc21445388 + checksum: 10c0/76d1c43cbd602794ad8ad2ae94095cddeb1de78c5dddaa7005c51af10b0176c69971a6d88e805a90c2b6550d76636e43c40d8427a808b8645ede885de4a0358e languageName: node linkType: hard @@ -5731,7 +5750,7 @@ __metadata: ssri: "npm:^10.0.0" tar: "npm:^6.1.11" unique-filename: "npm:^3.0.0" - checksum: 5ca58464f785d4d64ac2019fcad95451c8c89bea25949f63acd8987fcc3493eaef1beccc0fa39e673506d879d3fc1ab420760f8a14f8ddf46ea2d121805a5e96 + checksum: 10c0/7992665305cc251a984f4fdbab1449d50e88c635bc43bf2785530c61d239c61b349e5734461baa461caaee65f040ab14e2d58e694f479c0810cffd181ba5eabc languageName: node linkType: hard @@ -5744,35 +5763,35 @@ __metadata: function-bind: "npm:^1.1.2" get-intrinsic: 
"npm:^1.2.4" set-function-length: "npm:^1.2.1" - checksum: cd6fe658e007af80985da5185bff7b55e12ef4c2b6f41829a26ed1eef254b1f1c12e3dfd5b2b068c6ba8b86aba62390842d81752e67dcbaec4f6f76e7113b6b7 + checksum: 10c0/a3ded2e423b8e2a265983dba81c27e125b48eefb2655e7dfab6be597088da3d47c47976c24bc51b8fd9af1061f8f87b4ab78a314f3c77784b2ae2ba535ad8b8d languageName: node linkType: hard "callsites@npm:^3.0.0": version: 3.1.0 resolution: "callsites@npm:3.1.0" - checksum: 072d17b6abb459c2ba96598918b55868af677154bec7e73d222ef95a8fdb9bbf7dae96a8421085cdad8cd190d86653b5b6dc55a4484f2e5b2e27d5e0c3fc15b3 + checksum: 10c0/fff92277400eb06c3079f9e74f3af120db9f8ea03bad0e84d9aede54bbe2d44a56cccb5f6cf12211f93f52306df87077ecec5b712794c5a9b5dac6d615a3f301 languageName: node linkType: hard "camelcase@npm:^5.3.1": version: 5.3.1 resolution: "camelcase@npm:5.3.1" - checksum: e6effce26b9404e3c0f301498184f243811c30dfe6d0b9051863bd8e4034d09c8c2923794f280d6827e5aa055f6c434115ff97864a16a963366fb35fd673024b + checksum: 10c0/92ff9b443bfe8abb15f2b1513ca182d16126359ad4f955ebc83dc4ddcc4ef3fdd2c078bc223f2673dc223488e75c99b16cc4d056624374b799e6a1555cf61b23 languageName: node linkType: hard "camelcase@npm:^6.2.0": version: 6.3.0 resolution: "camelcase@npm:6.3.0" - checksum: 8c96818a9076434998511251dcb2761a94817ea17dbdc37f47ac080bd088fc62c7369429a19e2178b993497132c8cbcf5cc1f44ba963e76782ba469c0474938d + checksum: 10c0/0d701658219bd3116d12da3eab31acddb3f9440790c0792e0d398f0a520a6a4058018e546862b6fba89d7ae990efaeb97da71e1913e9ebf5a8b5621a3d55c710 languageName: node linkType: hard "caniuse-lite@npm:^1.0.30001587": version: 1.0.30001591 resolution: "caniuse-lite@npm:1.0.30001591" - checksum: 3891fad30a99b984a3a20570c0440d35dda933c79ea190cdb78a1f1743866506a4b41b4389b53a7c0351f2228125f9dc49308463f57e61503e5689b444add1a8 + checksum: 10c0/21937d341c3d75994504db21340f65573a1e847a8ab33ee4964ed493994d6552864c494ba144485459abd9c711c75c0708bc9fa19f2bff525bff75ffb0a42c3b languageName: node linkType: hard @@ -5783,7 +5802,7 @@ 
__metadata: ansi-styles: "npm:^3.2.1" escape-string-regexp: "npm:^1.0.5" supports-color: "npm:^5.3.0" - checksum: 3d1d103433166f6bfe82ac75724951b33769675252d8417317363ef9d54699b7c3b2d46671b772b893a8e50c3ece70c4b933c73c01e81bc60ea4df9b55afa303 + checksum: 10c0/e6543f02ec877732e3a2d1c3c3323ddb4d39fbab687c23f526e25bd4c6a9bf3b83a696e8c769d078e04e5754921648f7821b2a2acfd16c550435fd630026e073 languageName: node linkType: hard @@ -5793,7 +5812,7 @@ __metadata: dependencies: ansi-styles: "npm:^4.1.0" supports-color: "npm:^7.1.0" - checksum: 37f90b31fd655fb49c2bd8e2a68aebefddd64522655d001ef417e6f955def0ed9110a867ffc878a533f2dafea5f2032433a37c8a7614969baa7f8a1cd424ddfc + checksum: 10c0/ee650b0a065b3d7a6fda258e75d3a86fc8e4effa55871da730a9e42ccb035bf5fd203525e5a1ef45ec2582ecc4f65b47eb11357c526b84dd29a14fb162c414d2 languageName: node linkType: hard @@ -5803,56 +5822,63 @@ __metadata: dependencies: ansi-styles: "npm:^4.1.0" supports-color: "npm:^7.1.0" - checksum: cb3f3e594913d63b1814d7ca7c9bafbf895f75fbf93b92991980610dfd7b48500af4e3a5d4e3a8f337990a96b168d7eb84ee55efdce965e2ee8efc20f8c8f139 + checksum: 10c0/4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 + languageName: node + linkType: hard + +"chalk@npm:~5.3.0": + version: 5.3.0 + resolution: "chalk@npm:5.3.0" + checksum: 10c0/8297d436b2c0f95801103ff2ef67268d362021b8210daf8ddbe349695333eb3610a71122172ff3b0272f1ef2cf7cc2c41fdaa4715f52e49ffe04c56340feed09 languageName: node linkType: hard "char-regex@npm:^1.0.2": version: 1.0.2 resolution: "char-regex@npm:1.0.2" - checksum: 1ec5c2906adb9f84e7f6732a40baef05d7c85401b82ffcbc44b85fbd0f7a2b0c2a96f2eb9cf55cae3235dc12d4023003b88f09bcae8be9ae894f52ed746f4d48 + checksum: 10c0/57a09a86371331e0be35d9083ba429e86c4f4648ecbe27455dbfb343037c16ee6fdc7f6b61f433a57cc5ded5561d71c56a150e018f40c2ffb7bc93a26dae341e languageName: node linkType: hard "character-entities-legacy@npm:^1.0.0": version: 1.1.4 resolution: 
"character-entities-legacy@npm:1.1.4" - checksum: fe03a82c154414da3a0c8ab3188e4237ec68006cbcd681cf23c7cfb9502a0e76cd30ab69a2e50857ca10d984d57de3b307680fff5328ccd427f400e559c3a811 + checksum: 10c0/ea4ca9c29887335eed86d78fc67a640168342b1274da84c097abb0575a253d1265281a5052f9a863979e952bcc267b4ecaaf4fe233a7e1e0d8a47806c65b96c7 languageName: node linkType: hard "character-entities-legacy@npm:^3.0.0": version: 3.0.0 resolution: "character-entities-legacy@npm:3.0.0" - checksum: 7582af055cb488b626d364b7d7a4e46b06abd526fb63c0e4eb35bcb9c9799cc4f76b39f34fdccef2d1174ac95e53e9ab355aae83227c1a2505877893fce77731 + checksum: 10c0/ec4b430af873661aa754a896a2b55af089b4e938d3d010fad5219299a6b6d32ab175142699ee250640678cd64bdecd6db3c9af0b8759ab7b155d970d84c4c7d1 languageName: node linkType: hard "character-entities@npm:^1.0.0": version: 1.2.4 resolution: "character-entities@npm:1.2.4" - checksum: 7c11641c48d1891aaba7bc800d4500804d91a28f46d64e88c001c38e6ab2e7eae28873a77ae16e6c55d24cac35ddfbb15efe56c3012b86684a3c4e95c70216b7 + checksum: 10c0/ad015c3d7163563b8a0ee1f587fb0ef305ef344e9fd937f79ca51cccc233786a01d591d989d5bf7b2e66b528ac9efba47f3b1897358324e69932f6d4b25adfe1 languageName: node linkType: hard "character-entities@npm:^2.0.0": version: 2.0.2 resolution: "character-entities@npm:2.0.2" - checksum: c8dd1f4bf1a92fccf7d2fad9673660a88b37854557d30f6076c32fedfb92d1420208298829ff1d3b6b4fa1c7012e8326c45e7f5c3ed1e9a09ec177593c521b2f + checksum: 10c0/b0c645a45bcc90ff24f0e0140f4875a8436b8ef13b6bcd31ec02cfb2ca502b680362aa95386f7815bdc04b6464d48cf191210b3840d7c04241a149ede591a308 languageName: node linkType: hard "character-reference-invalid@npm:^1.0.0": version: 1.1.4 resolution: "character-reference-invalid@npm:1.1.4" - checksum: 812ebc5e6e8d08fd2fa5245ae78c1e1a4bea4692e93749d256a135c4a442daf931ca18e067cc61ff4a58a419eae52677126a0bc4f05a511290427d60d3057805 + checksum: 
10c0/29f05081c5817bd1e975b0bf61e77b60a40f62ad371d0f0ce0fdb48ab922278bc744d1fbe33771dced751887a8403f265ff634542675c8d7375f6ff4811efd0e languageName: node linkType: hard "character-reference-invalid@npm:^2.0.0": version: 2.0.1 resolution: "character-reference-invalid@npm:2.0.1" - checksum: 98d3b1a52ae510b7329e6ee7f6210df14f1e318c5415975d4c9e7ee0ef4c07875d47c6e74230c64551f12f556b4a8ccc24d9f3691a2aa197019e72a95e9297ee + checksum: 10c0/2ae0dec770cd8659d7e8b0ce24392d83b4c2f0eb4a3395c955dce5528edd4cc030a794cfa06600fcdd700b3f2de2f9b8e40e309c0011c4180e3be64a0b42e6a1 languageName: node linkType: hard @@ -5871,28 +5897,28 @@ __metadata: dependenciesMeta: fsevents: optional: true - checksum: c327fb07704443f8d15f7b4a7ce93b2f0bc0e6cea07ec28a7570aa22cd51fcf0379df589403976ea956c369f25aa82d84561947e227cd925902e1751371658df + checksum: 10c0/8361dcd013f2ddbe260eacb1f3cb2f2c6f2b0ad118708a343a5ed8158941a39cb8fb1d272e0f389712e74ee90ce8ba864eece9e0e62b9705cb468a2f6d917462 languageName: node linkType: hard "chownr@npm:^1.1.1": version: 1.1.4 resolution: "chownr@npm:1.1.4" - checksum: 115648f8eb38bac5e41c3857f3e663f9c39ed6480d1349977c4d96c95a47266fcacc5a5aabf3cb6c481e22d72f41992827db47301851766c4fd77ac21a4f081d + checksum: 10c0/ed57952a84cc0c802af900cf7136de643d3aba2eecb59d29344bc2f3f9bf703a301b9d84cdc71f82c3ffc9ccde831b0d92f5b45f91727d6c9da62f23aef9d9db languageName: node linkType: hard "chownr@npm:^2.0.0": version: 2.0.0 resolution: "chownr@npm:2.0.0" - checksum: c57cf9dd0791e2f18a5ee9c1a299ae6e801ff58fee96dc8bfd0dcb4738a6ce58dd252a3605b1c93c6418fe4f9d5093b28ffbf4d66648cb2a9c67eaef9679be2f + checksum: 10c0/594754e1303672171cc04e50f6c398ae16128eb134a88f801bf5354fd96f205320f23536a045d9abd8b51024a149696e51231565891d4efdab8846021ecf88e6 languageName: node linkType: hard "ci-info@npm:^3.2.0": version: 3.9.0 resolution: "ci-info@npm:3.9.0" - checksum: 75bc67902b4d1c7b435497adeb91598f6d52a3389398e44294f6601b20cfef32cf2176f7be0eb961d9e085bb333a8a5cae121cb22f81cf238ae7f58eb80e9397 + checksum: 
10c0/6f0109e36e111684291d46123d491bc4e7b7a1934c3a20dea28cba89f1d4a03acd892f5f6a81ed3855c38647e285a150e3c9ba062e38943bef57fee6c1554c3a languageName: node linkType: hard @@ -5901,28 +5927,28 @@ __metadata: resolution: "citty@npm:0.1.6" dependencies: consola: "npm:^3.2.3" - checksum: 3208947e73abb699a12578ee2bfee254bf8dd1ce0d5698e8a298411cabf16bd3620d63433aef5bd88cdb2b9da71aef18adefa3b4ffd18273bb62dd1d28c344f5 + checksum: 10c0/d26ad82a9a4a8858c7e149d90b878a3eceecd4cfd3e2ed3cd5f9a06212e451fb4f8cbe0fa39a3acb1b3e8f18e22db8ee5def5829384bad50e823d4b301609b48 languageName: node linkType: hard "cjs-module-lexer@npm:^1.0.0": version: 1.2.3 resolution: "cjs-module-lexer@npm:1.2.3" - checksum: f96a5118b0a012627a2b1c13bd2fcb92509778422aaa825c5da72300d6dcadfb47134dd2e9d97dfa31acd674891dd91642742772d19a09a8adc3e56bd2f5928c + checksum: 10c0/0de9a9c3fad03a46804c0d38e7b712fb282584a9c7ef1ed44cae22fb71d9bb600309d66a9711ac36a596fd03422f5bb03e021e8f369c12a39fa1786ae531baab languageName: node linkType: hard "classnames@npm:2.5.1": version: 2.5.1 resolution: "classnames@npm:2.5.1" - checksum: 58eb394e8817021b153bb6e7d782cfb667e4ab390cb2e9dac2fc7c6b979d1cc2b2a733093955fc5c94aa79ef5c8c89f11ab77780894509be6afbb91dddd79d15 + checksum: 10c0/afff4f77e62cea2d79c39962980bf316bacb0d7c49e13a21adaadb9221e1c6b9d3cdb829d8bb1b23c406f4e740507f37e1dcf506f7e3b7113d17c5bab787aa69 languageName: node linkType: hard "clean-stack@npm:^2.0.0": version: 2.2.0 resolution: "clean-stack@npm:2.2.0" - checksum: 2ac8cd2b2f5ec986a3c743935ec85b07bc174d5421a5efc8017e1f146a1cf5f781ae962618f416352103b32c9cd7e203276e8c28241bbe946160cab16149fb68 + checksum: 10c0/1f90262d5f6230a17e27d0c190b09d47ebe7efdd76a03b5a1127863f7b3c9aec4c3e6c8bb3a7bbf81d553d56a1fd35728f5a8ef4c63f867ac8d690109742a8c1 languageName: node linkType: hard @@ -5931,14 +5957,23 @@ __metadata: resolution: "cli-cursor@npm:3.1.0" dependencies: restore-cursor: "npm:^3.1.0" - checksum: 
2692784c6cd2fd85cfdbd11f53aea73a463a6d64a77c3e098b2b4697a20443f430c220629e1ca3b195ea5ac4a97a74c2ee411f3807abf6df2b66211fec0c0a29 + checksum: 10c0/92a2f98ff9037d09be3dfe1f0d749664797fb674bf388375a2207a1203b69d41847abf16434203e0089212479e47a358b13a0222ab9fccfe8e2644a7ccebd111 + languageName: node + linkType: hard + +"cli-cursor@npm:^5.0.0": + version: 5.0.0 + resolution: "cli-cursor@npm:5.0.0" + dependencies: + restore-cursor: "npm:^5.0.0" + checksum: 10c0/7ec62f69b79f6734ab209a3e4dbdc8af7422d44d360a7cb1efa8a0887bbe466a6e625650c466fe4359aee44dbe2dc0b6994b583d40a05d0808a5cb193641d220 languageName: node linkType: hard "cli-spinners@npm:^2.5.0": version: 2.9.2 resolution: "cli-spinners@npm:2.9.2" - checksum: a0a863f442df35ed7294424f5491fa1756bd8d2e4ff0c8736531d886cec0ece4d85e8663b77a5afaf1d296e3cbbebff92e2e99f52bbea89b667cbe789b994794 + checksum: 10c0/907a1c227ddf0d7a101e7ab8b300affc742ead4b4ebe920a5bf1bc6d45dce2958fcd195eb28fa25275062fe6fa9b109b93b63bc8033396ed3bcb50297008b3a3 languageName: node linkType: hard @@ -5951,7 +5986,17 @@ __metadata: dependenciesMeta: "@colors/colors": optional: true - checksum: 8d82b75be7edc7febb1283dc49582a521536527cba80af62a2e4522a0ee39c252886a1a2f02d05ae9d753204dbcffeb3a40d1358ee10dccd7fe8d935cfad3f85 + checksum: 10c0/39e580cb346c2eaf1bd8f4ff055ae644e902b8303c164a1b8894c0dc95941f92e001db51f49649011be987e708d9fa3183ccc2289a4d376a057769664048cc0c + languageName: node + linkType: hard + +"cli-truncate@npm:^4.0.0": + version: 4.0.0 + resolution: "cli-truncate@npm:4.0.0" + dependencies: + slice-ansi: "npm:^5.0.0" + string-width: "npm:^7.0.0" + checksum: 10c0/d7f0b73e3d9b88cb496e6c086df7410b541b56a43d18ade6a573c9c18bd001b1c3fba1ad578f741a4218fdc794d042385f8ac02c25e1c295a2d8b9f3cb86eb4c languageName: node linkType: hard @@ -5962,14 +6007,14 @@ __metadata: string-width: "npm:^4.2.0" strip-ansi: "npm:^6.0.1" wrap-ansi: "npm:^7.0.0" - checksum: 
eaa5561aeb3135c2cddf7a3b3f562fc4238ff3b3fc666869ef2adf264be0f372136702f16add9299087fb1907c2e4ec5dbfe83bd24bce815c70a80c6c1a2e950 + checksum: 10c0/4bda0f09c340cbb6dfdc1ed508b3ca080f12992c18d68c6be4d9cf51756033d5266e61ec57529e610dacbf4da1c634423b0c1b11037709cc6b09045cbd815df5 languageName: node linkType: hard "clone-buffer@npm:^1.0.0": version: 1.0.0 resolution: "clone-buffer@npm:1.0.0" - checksum: a39a35e7fd081e0f362ba8195bd15cbc8205df1fbe4598bb4e09c1f9a13c0320a47ab8a61a8aa83561e4ed34dc07666d73254ee952ddd3985e4286b082fe63b9 + checksum: 10c0/d813f4d12651bc4951d5e4869e2076d34ccfc3b23d0aae4e2e20e5a5e97bc7edbba84038356d222c54b25e3a83b5f45e8b637c18c6bd1794b2f1b49114122c50 languageName: node linkType: hard @@ -5980,28 +6025,28 @@ __metadata: is-plain-object: "npm:^2.0.4" kind-of: "npm:^6.0.2" shallow-clone: "npm:^3.0.0" - checksum: 770f912fe4e6f21873c8e8fbb1e99134db3b93da32df271d00589ea4a29dbe83a9808a322c93f3bcaf8584b8b4fa6fc269fc8032efbaa6728e0c9886c74467d2 + checksum: 10c0/637753615aa24adf0f2d505947a1bb75e63964309034a1cf56ba4b1f30af155201edd38d26ffe26911adaae267a3c138b344a4947d39f5fc1b6d6108125aa758 languageName: node linkType: hard "clone-stats@npm:^1.0.0": version: 1.0.0 resolution: "clone-stats@npm:1.0.0" - checksum: 654c0425afc5c5c55a4d95b2e0c6eccdd55b5247e7a1e7cca9000b13688b96b0a157950c72c5307f9fd61f17333ad796d3cd654778f2d605438012391cc4ada5 + checksum: 10c0/bb1e05991e034e1eb104173c25bb652ea5b2b4dad5a49057a857e00f8d1da39de3bd689128a25bab8cbdfbea8ae8f6066030d106ed5c299a7d92be7967c50217 languageName: node linkType: hard "clone@npm:^1.0.2": version: 1.0.4 resolution: "clone@npm:1.0.4" - checksum: d06418b7335897209e77bdd430d04f882189582e67bd1f75a04565f3f07f5b3f119a9d670c943b6697d0afb100f03b866b3b8a1f91d4d02d72c4ecf2bb64b5dd + checksum: 10c0/2176952b3649293473999a95d7bebfc9dc96410f6cbd3d2595cf12fd401f63a4bf41a7adbfd3ab2ff09ed60cb9870c58c6acdd18b87767366fabfc163700f13b languageName: node linkType: hard "clone@npm:^2.1.1": version: 2.1.2 resolution: "clone@npm:2.1.2" - 
checksum: d9c79efba655f0bf601ab299c57eb54cbaa9860fb011aee9d89ed5ac0d12df1660ab7642fddaabb9a26b7eff0e117d4520512cb70798319ff5d30a111b5310c2 + checksum: 10c0/ed0601cd0b1606bc7d82ee7175b97e68d1dd9b91fd1250a3617b38d34a095f8ee0431d40a1a611122dcccb4f93295b4fdb94942aa763392b5fe44effa50c2d5e languageName: node linkType: hard @@ -6012,21 +6057,21 @@ __metadata: inherits: "npm:^2.0.1" process-nextick-args: "npm:^2.0.0" readable-stream: "npm:^2.3.5" - checksum: 81e17fe4b2901e2d9899717e1d4ed88bd1ede700b819b77c61f7402b9ca97c4769692d85bd74710be806f31caf33c62acdea49d5bbe8794a66ade01c9c2d5a6d + checksum: 10c0/52db2904dcfcd117e4e9605b69607167096c954352eff0fcded0a16132c9cfc187b36b5db020bee2dc1b3a968ca354f8b30aef3d8b4ea74e3ea83a81d43e47bb languageName: node linkType: hard "co@npm:^4.6.0": version: 4.6.0 resolution: "co@npm:4.6.0" - checksum: a5d9f37091c70398a269e625cedff5622f200ed0aa0cff22ee7b55ed74a123834b58711776eb0f1dc58eb6ebbc1185aa7567b57bd5979a948c6e4f85073e2c05 + checksum: 10c0/c0e85ea0ca8bf0a50cbdca82efc5af0301240ca88ebe3644a6ffb8ffe911f34d40f8fbcf8f1d52c5ddd66706abd4d3bfcd64259f1e8e2371d4f47573b0dc8c28 languageName: node linkType: hard "collect-v8-coverage@npm:^1.0.0": version: 1.0.2 resolution: "collect-v8-coverage@npm:1.0.2" - checksum: 30ea7d5c9ee51f2fdba4901d4186c5b7114a088ef98fd53eda3979da77eed96758a2cae81cc6d97e239aaea6065868cf908b24980663f7b7e96aa291b3e12fa4 + checksum: 10c0/ed7008e2e8b6852c5483b444a3ae6e976e088d4335a85aa0a9db2861c5f1d31bd2d7ff97a60469b3388deeba661a619753afbe201279fb159b4b9548ab8269a1 languageName: node linkType: hard @@ -6035,7 +6080,7 @@ __metadata: resolution: "color-convert@npm:1.9.3" dependencies: color-name: "npm:1.1.3" - checksum: ffa319025045f2973919d155f25e7c00d08836b6b33ea2d205418c59bd63a665d713c52d9737a9e0fe467fb194b40fbef1d849bae80d674568ee220a31ef3d10 + checksum: 10c0/5ad3c534949a8c68fca8fbc6f09068f435f0ad290ab8b2f76841b9e6af7e0bb57b98cb05b0e19fe33f5d91e5a8611ad457e5f69e0a484caad1f7487fd0e8253c languageName: node linkType: hard @@ 
-6044,21 +6089,21 @@ __metadata: resolution: "color-convert@npm:2.0.1" dependencies: color-name: "npm:~1.1.4" - checksum: fa00c91b4332b294de06b443923246bccebe9fab1b253f7fe1772d37b06a2269b4039a85e309abe1fe11b267b11c08d1d0473fda3badd6167f57313af2887a64 + checksum: 10c0/37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 languageName: node linkType: hard "color-name@npm:1.1.3": version: 1.1.3 resolution: "color-name@npm:1.1.3" - checksum: 09c5d3e33d2105850153b14466501f2bfb30324a2f76568a408763a3b7433b0e50e5b4ab1947868e65cb101bb7cb75029553f2c333b6d4b8138a73fcc133d69d + checksum: 10c0/566a3d42cca25b9b3cd5528cd7754b8e89c0eb646b7f214e8e2eaddb69994ac5f0557d9c175eb5d8f0ad73531140d9c47525085ee752a91a2ab15ab459caf6d6 languageName: node linkType: hard "color-name@npm:^1.0.0, color-name@npm:~1.1.4": version: 1.1.4 resolution: "color-name@npm:1.1.4" - checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 + checksum: 10c0/a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 languageName: node linkType: hard @@ -6068,7 +6113,7 @@ __metadata: dependencies: color-name: "npm:^1.0.0" simple-swizzle: "npm:^0.2.2" - checksum: 72aa0b81ee71b3f4fb1ac9cd839cdbd7a011a7d318ef58e6cb13b3708dca75c7e45029697260488709f1b1c7ac4e35489a87e528156c1e365917d1c4ccb9b9cd + checksum: 10c0/b0bfd74c03b1f837f543898b512f5ea353f71630ccdd0d66f83028d1f0924a7d4272deb278b9aef376cacf1289b522ac3fb175e99895283645a2dc3a33af2404 languageName: node linkType: hard @@ -6078,7 +6123,14 @@ __metadata: dependencies: color-convert: "npm:^1.9.3" color-string: "npm:^1.6.0" - checksum: bf70438e0192f4f62f4bfbb303e7231289e8cc0d15ff6b6cbdb722d51f680049f38d4fdfc057a99cb641895cf5e350478c61d98586400b060043afc44285e7ae + checksum: 
10c0/39345d55825884c32a88b95127d417a2c24681d8b57069413596d9fcbb721459ef9d9ec24ce3e65527b5373ce171b73e38dbcd9c830a52a6487e7f37bf00e83c + languageName: node + linkType: hard + +"colorette@npm:^2.0.20": + version: 2.0.20 + resolution: "colorette@npm:2.0.20" + checksum: 10c0/e94116ff33b0ff56f3b83b9ace895e5bf87c2a7a47b3401b8c3f3226e050d5ef76cf4072fb3325f9dc24d1698f9b730baf4e05eeaf861d74a1883073f4c98a40 languageName: node linkType: hard @@ -6088,7 +6140,7 @@ __metadata: dependencies: color: "npm:^3.1.3" text-hex: "npm:1.0.x" - checksum: bb3934ef3c417e961e6d03d7ca60ea6e175947029bfadfcdb65109b01881a1c0ecf9c2b0b59abcd0ee4a0d7c1eae93beed01b0e65848936472270a0b341ebce8 + checksum: 10c0/af5f91ff7f8e146b96e439ac20ed79b197210193bde721b47380a75b21751d90fa56390c773bb67c0aedd34ff85091883a437ab56861c779bd507d639ba7e123 languageName: node linkType: hard @@ -6097,42 +6149,49 @@ __metadata: resolution: "combined-stream@npm:1.0.8" dependencies: delayed-stream: "npm:~1.0.0" - checksum: 2e969e637d05d09fa50b02d74c83a1186f6914aae89e6653b62595cc75a221464f884f55f231b8f4df7a49537fba60bdc0427acd2bf324c09a1dbb84837e36e4 + checksum: 10c0/0dbb829577e1b1e839fa82b40c07ffaf7de8a09b935cadd355a73652ae70a88b4320db322f6634a4ad93424292fa80973ac6480986247f1734a1137debf271d5 languageName: node linkType: hard "comma-separated-tokens@npm:^1.0.0": version: 1.0.8 resolution: "comma-separated-tokens@npm:1.0.8" - checksum: 0adcb07174fa4d08cf0f5c8e3aec40a36b5ff0c2c720e5e23f50fe02e6789d1d00a67036c80e0c1e1539f41d3e7f0101b074039dd833b4e4a59031b659d6ca0d + checksum: 10c0/c3bcfeaa6d50313528a006a40bcc0f9576086665c9b48d4b3a76ddd63e7d6174734386c98be1881cbf6ecfc25e1db61cd775a7b896d2ea7a65de28f83a0f9b17 languageName: node linkType: hard "comma-separated-tokens@npm:^2.0.0": version: 2.0.3 resolution: "comma-separated-tokens@npm:2.0.3" - checksum: e3bf9e0332a5c45f49b90e79bcdb4a7a85f28d6a6f0876a94f1bb9b2bfbdbbb9292aac50e1e742d8c0db1e62a0229a106f57917e2d067fca951d81737651700d + checksum: 
10c0/91f90f1aae320f1755d6957ef0b864fe4f54737f3313bd95e0802686ee2ca38bff1dd381964d00ae5db42912dd1f4ae5c2709644e82706ffc6f6842a813cdd67 languageName: node linkType: hard "commander@npm:^6.2.1": version: 6.2.1 resolution: "commander@npm:6.2.1" - checksum: 25b88c2efd0380c84f7844b39cf18510da7bfc5013692d68cdc65f764a1c34e6c8a36ea6d72b6620e3710a930cf8fab2695bdec2bf7107a0f4fa30a3ef3b7d0e + checksum: 10c0/85748abd9d18c8bc88febed58b98f66b7c591d9b5017cad459565761d7b29ca13b7783ea2ee5ce84bf235897333706c4ce29adf1ce15c8252780e7000e2ce9ea languageName: node linkType: hard "commander@npm:^7.2.0": version: 7.2.0 resolution: "commander@npm:7.2.0" - checksum: 9973af10727ad4b44f26703bf3e9fdc323528660a7590efe3aa9ad5042b4584c0deed84ba443f61c9d6f02dade54a5a5d3c95e306a1e1630f8374ae6db16c06d + checksum: 10c0/8d690ff13b0356df7e0ebbe6c59b4712f754f4b724d4f473d3cc5b3fdcf978e3a5dc3078717858a2ceb50b0f84d0660a7f22a96cdc50fb877d0c9bb31593d23a + languageName: node + linkType: hard + +"commander@npm:~12.1.0": + version: 12.1.0 + resolution: "commander@npm:12.1.0" + checksum: 10c0/6e1996680c083b3b897bfc1cfe1c58dfbcd9842fd43e1aaf8a795fbc237f65efcc860a3ef457b318e73f29a4f4a28f6403c3d653d021d960e4632dd45bde54a9 languageName: node linkType: hard "commondir@npm:^1.0.1": version: 1.0.1 resolution: "commondir@npm:1.0.1" - checksum: 4620bc4936a4ef12ce7dfcd272bb23a99f2ad68889a4e4ad766c9f8ad21af982511934d6f7050d4a8bde90011b1c15d56e61a1b4576d9913efbf697a20172d6c + checksum: 10c0/33a124960e471c25ee19280c9ce31ccc19574b566dc514fe4f4ca4c34fa8b0b57cf437671f5de380e11353ea9426213fca17687dd2ef03134fea2dbc53809fd6 languageName: node linkType: hard @@ -6141,7 +6200,7 @@ __metadata: resolution: "compressible@npm:2.0.18" dependencies: mime-db: "npm:>= 1.43.0 < 2" - checksum: 58321a85b375d39230405654721353f709d0c1442129e9a17081771b816302a012471a9b8f4864c7dbe02eef7f2aaac3c614795197092262e94b409c9be108f0 + checksum: 
10c0/8a03712bc9f5b9fe530cc5a79e164e665550d5171a64575d7dcf3e0395d7b4afa2d79ab176c61b5b596e28228b350dd07c1a2a6ead12fd81d1b6cd632af2fef7 languageName: node linkType: hard @@ -6156,14 +6215,14 @@ __metadata: on-headers: "npm:~1.0.2" safe-buffer: "npm:5.1.2" vary: "npm:~1.1.2" - checksum: 469cd097908fe1d3ff146596d4c24216ad25eabb565c5456660bdcb3a14c82ebc45c23ce56e19fc642746cf407093b55ab9aa1ac30b06883b27c6c736e6383c2 + checksum: 10c0/138db836202a406d8a14156a5564fb1700632a76b6e7d1546939472895a5304f2b23c80d7a22bf44c767e87a26e070dbc342ea63bb45ee9c863354fa5556bbbc languageName: node linkType: hard "concat-map@npm:0.0.1": version: 0.0.1 resolution: "concat-map@npm:0.0.1" - checksum: 9680699c8e2b3af0ae22592cb764acaf973f292a7b71b8a06720233011853a58e256c89216a10cbe889727532fd77f8bcd49a760cedfde271b8e006c20e079f2 + checksum: 10c0/c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f languageName: node linkType: hard @@ -6175,14 +6234,14 @@ __metadata: inherits: "npm:^2.0.3" readable-stream: "npm:^2.2.2" typedarray: "npm:^0.0.6" - checksum: 71db903c84fc073ca35a274074e8d26c4330713d299f8623e993c448c1f6bf8b967806dd1d1a7b0f8add6f15ab1af7435df21fe79b4fe7efd78420c89e054e28 + checksum: 10c0/2e9864e18282946dabbccb212c5c7cec0702745e3671679eb8291812ca7fd12023f7d8cb36493942a62f770ac96a7f90009dc5c82ad69893438371720fa92617 languageName: node linkType: hard "consola@npm:^3.2.3": version: 3.2.3 resolution: "consola@npm:3.2.3" - checksum: 02972dcb048c337357a3628438e5976b8e45bcec22fdcfbe9cd17622992953c4d695d5152f141464a02deac769b1d23028e8ac87f56483838df7a6bbf8e0f5a2 + checksum: 10c0/c606220524ec88a05bb1baf557e9e0e04a0c08a9c35d7a08652d99de195c4ddcb6572040a7df57a18ff38bbc13ce9880ad032d56630cef27bef72768ef0ac078 languageName: node linkType: hard @@ -6191,35 +6250,35 @@ __metadata: resolution: "content-disposition@npm:0.5.4" dependencies: safe-buffer: "npm:5.2.1" - checksum: 
b7f4ce176e324f19324be69b05bf6f6e411160ac94bc523b782248129eb1ef3be006f6cff431aaea5e337fe5d176ce8830b8c2a1b721626ead8933f0cbe78720 + checksum: 10c0/bac0316ebfeacb8f381b38285dc691c9939bf0a78b0b7c2d5758acadad242d04783cee5337ba7d12a565a19075af1b3c11c728e1e4946de73c6ff7ce45f3f1bb languageName: node linkType: hard "content-type@npm:~1.0.4": version: 1.0.5 resolution: "content-type@npm:1.0.5" - checksum: 585847d98dc7fb8035c02ae2cb76c7a9bd7b25f84c447e5ed55c45c2175e83617c8813871b4ee22f368126af6b2b167df655829007b21aa10302873ea9c62662 + checksum: 10c0/b76ebed15c000aee4678c3707e0860cb6abd4e680a598c0a26e17f0bfae723ec9cc2802f0ff1bc6e4d80603719010431d2231018373d4dde10f9ccff9dadf5af languageName: node linkType: hard "convert-source-map@npm:^2.0.0": version: 2.0.0 resolution: "convert-source-map@npm:2.0.0" - checksum: c987be3ec061348cdb3c2bfb924bec86dea1eacad10550a85ca23edb0fe3556c3a61c7399114f3331ccb3499d7fd0285ab24566e5745929412983494c3926e15 + checksum: 10c0/8f2f7a27a1a011cc6cc88cc4da2d7d0cfa5ee0369508baae3d98c260bb3ac520691464e5bbe4ae7cdf09860c1d69ecc6f70c63c6e7c7f7e3f18ec08484dc7d9b languageName: node linkType: hard "cookie-signature@npm:1.0.6": version: 1.0.6 resolution: "cookie-signature@npm:1.0.6" - checksum: f4e1b0a98a27a0e6e66fd7ea4e4e9d8e038f624058371bf4499cfcd8f3980be9a121486995202ba3fca74fbed93a407d6d54d43a43f96fd28d0bd7a06761591a + checksum: 10c0/b36fd0d4e3fef8456915fcf7742e58fbfcc12a17a018e0eb9501c9d5ef6893b596466f03b0564b81af29ff2538fd0aa4b9d54fe5ccbfb4c90ea50ad29fe2d221 languageName: node linkType: hard "cookie@npm:0.5.0": version: 0.5.0 resolution: "cookie@npm:0.5.0" - checksum: aae7911ddc5f444a9025fbd979ad1b5d60191011339bce48e555cb83343d0f98b865ff5c4d71fecdfb8555a5cafdc65632f6fce172f32aaf6936830a883a0380 + checksum: 10c0/c01ca3ef8d7b8187bae434434582288681273b5a9ed27521d4d7f9f7928fe0c920df0decd9f9d3bbd2d14ac432b8c8cf42b98b3bdd5bfe0e6edddeebebe8b61d languageName: node linkType: hard @@ -6228,7 +6287,7 @@ __metadata: resolution: "copy-to-clipboard@npm:3.3.3" 
dependencies: toggle-selection: "npm:^1.0.6" - checksum: e0a325e39b7615108e6c1c8ac110ae7b829cdc4ee3278b1df6a0e4228c490442cc86444cd643e2da344fbc424b3aab8909e2fec82f8bc75e7e5b190b7c24eecf + checksum: 10c0/3ebf5e8ee00601f8c440b83ec08d838e8eabb068c1fae94a9cda6b42f288f7e1b552f3463635f419af44bf7675afc8d0390d30876cf5c2d5d35f86d9c56a3e5f languageName: node linkType: hard @@ -6237,14 +6296,14 @@ __metadata: resolution: "core-js-compat@npm:3.36.0" dependencies: browserslist: "npm:^4.22.3" - checksum: 633c49a254fe48981057e33651e5a74a0a14f14731aa5afed5d2e61fbe3c5cbc116ffd4feaa158c683c40d6dc4fd2e6aa0ebe12c45d157cfa571309d08400c98 + checksum: 10c0/5ce2ad0ece8379883c01958e196575abc015692fc0394b8917f132b6b32e5c2bfb2612902c3f98f270cfa2d9d6522c28d36665038f3726796f1f4b436e4f863e languageName: node linkType: hard "core-util-is@npm:~1.0.0": version: 1.0.3 resolution: "core-util-is@npm:1.0.3" - checksum: 9de8597363a8e9b9952491ebe18167e3b36e7707569eed0ebf14f8bba773611376466ae34575bca8cfe3c767890c859c74056084738f09d4e4a6f902b2ad7d99 + checksum: 10c0/90a0e40abbddfd7618f8ccd63a74d88deea94e77d0e8dbbea059fa7ebebb8fbb4e2909667fe26f3a467073de1a542ebe6ae4c73a73745ac5833786759cd906c9 languageName: node linkType: hard @@ -6261,7 +6320,7 @@ __metadata: peerDependenciesMeta: typescript: optional: true - checksum: 91d082baca0f33b1c085bf010f9ded4af43cbedacba8821da0fb5667184d0a848addc52c31fadd080007f904a555319c238cf5f4c03e6d58ece2e4876b2e73d6 + checksum: 10c0/0382a9ed13208f8bfc22ca2f62b364855207dffdb73dc26e150ade78c3093f1cf56172df2dd460c8caf2afa91c0ed4ec8a88c62f8f9cd1cf423d26506aa8797a languageName: node linkType: hard @@ -6278,7 +6337,7 @@ __metadata: prompts: "npm:^2.0.1" bin: create-jest: bin/create-jest.js - checksum: 847b4764451672b4174be4d5c6d7d63442ec3aa5f3de52af924e4d996d87d7801c18e125504f25232fc75840f6625b3ac85860fac6ce799b5efae7bdcaf4a2b7 + checksum: 10c0/e7e54c280692470d3398f62a6238fd396327e01c6a0757002833f06d00afc62dd7bfe04ff2b9cd145264460e6b4d1eb8386f2925b7e567f97939843b7b0e812f 
languageName: node linkType: hard @@ -6289,14 +6348,14 @@ __metadata: path-key: "npm:^3.1.0" shebang-command: "npm:^2.0.0" which: "npm:^2.0.1" - checksum: e1a13869d2f57d974de0d9ef7acbf69dc6937db20b918525a01dacb5032129bd552d290d886d981e99f1b624cb03657084cc87bd40f115c07ecf376821c729ce + checksum: 10c0/5738c312387081c98d69c98e105b6327b069197f864a60593245d64c8089c8a0a744e16349281210d56835bb9274130d825a78b2ad6853ca13cfbeffc0c31750 languageName: node linkType: hard "crypto-random-string@npm:^2.0.0": version: 2.0.0 resolution: "crypto-random-string@npm:2.0.0" - checksum: 0283879f55e7c16fdceacc181f87a0a65c53bc16ffe1d58b9d19a6277adcd71900d02bb2c4843dd55e78c51e30e89b0fec618a7f170ebcc95b33182c28f05fd6 + checksum: 10c0/288589b2484fe787f9e146f56c4be90b940018f17af1b152e4dde12309042ff5a2bf69e949aab8b8ac253948381529cc6f3e5a2427b73643a71ff177fa122b37 languageName: node linkType: hard @@ -6309,14 +6368,14 @@ __metadata: domhandler: "npm:^4.3.1" domutils: "npm:^2.8.0" nth-check: "npm:^2.0.1" - checksum: 8f7310c9af30ccaba8f72cb4a54d32232c53bf9ba05d019b693e16bfd7ba5df0affc1f4d74b1ee55923643d23b80a837eedcf60938c53356e479b04049ff9994 + checksum: 10c0/a489d8e5628e61063d5a8fe0fa1cc7ae2478cb334a388a354e91cf2908154be97eac9fa7ed4dffe87a3e06cf6fcaa6016553115335c4fd3377e13dac7bd5a8e1 languageName: node linkType: hard "css-selector-parser@npm:^1.4.1": version: 1.4.1 resolution: "css-selector-parser@npm:1.4.1" - checksum: 81ae797956a6c03e9668fcc11446b2e8ffb13fe87fccc5c80b852cace4136668bab8b171192f395edde94932b8e43b67c0975d8c187721e2da186684d6a17e83 + checksum: 10c0/4a89a7b61072cf0e4d09e8abbb9a77bc661232b6fe6a6fe51ba775757bae0e3fc462b0db4c9a857da55afb89a1c1746a7b2ec1200f639c539556ebdc758b0101 languageName: node linkType: hard @@ -6326,21 +6385,21 @@ __metadata: dependencies: mdn-data: "npm:2.0.14" source-map: "npm:^0.6.1" - checksum: 29710728cc4b136f1e9b23ee1228ec403ec9f3d487bc94a9c5dbec563c1e08c59bc917dd6f82521a35e869ff655c298270f43ca673265005b0cd05b292eb05ab + checksum: 
10c0/499a507bfa39b8b2128f49736882c0dd636b0cd3370f2c69f4558ec86d269113286b7df469afc955de6a68b0dba00bc533e40022a73698081d600072d5d83c1c languageName: node linkType: hard "css-what@npm:^6.0.1": version: 6.1.0 resolution: "css-what@npm:6.1.0" - checksum: c67a3a2d0d81843af87f8bf0a4d0845b0f952377714abbb2884e48942409d57a2110eabee003609d02ee487b054614bdfcfc59ee265728ff105bd5aa221c1d0e + checksum: 10c0/a09f5a6b14ba8dcf57ae9a59474722e80f20406c53a61e9aedb0eedc693b135113ffe2983f4efc4b5065ae639442e9ae88df24941ef159c218b231011d733746 languageName: node linkType: hard "css.escape@npm:^1.5.1": version: 1.5.1 resolution: "css.escape@npm:1.5.1" - checksum: f6d38088d870a961794a2580b2b2af1027731bb43261cfdce14f19238a88664b351cc8978abc20f06cc6bbde725699dec8deb6fe9816b139fc3f2af28719e774 + checksum: 10c0/5e09035e5bf6c2c422b40c6df2eb1529657a17df37fda5d0433d722609527ab98090baf25b13970ca754079a0f3161dd3dfc0e743563ded8cfa0749d861c1525 languageName: node linkType: hard @@ -6349,21 +6408,21 @@ __metadata: resolution: "csso@npm:4.2.0" dependencies: css-tree: "npm:^1.1.2" - checksum: 8b6a2dc687f2a8165dde13f67999d5afec63cb07a00ab100fbb41e4e8b28d986cfa0bc466b4f5ba5de7260c2448a64e6ad26ec718dd204d3a7d109982f0bf1aa + checksum: 10c0/f8c6b1300efaa0f8855a7905ae3794a29c6496e7f16a71dec31eb6ca7cfb1f058a4b03fd39b66c4deac6cb06bf6b4ba86da7b67d7320389cb9994d52b924b903 languageName: node linkType: hard "cssom@npm:^0.5.0": version: 0.5.0 resolution: "cssom@npm:0.5.0" - checksum: b502a315b1ce020a692036cc38cb36afa44157219b80deadfa040ab800aa9321fcfbecf02fd2e6ec87db169715e27978b4ab3701f916461e9cf7808899f23b54 + checksum: 10c0/8c4121c243baf0678c65dcac29b201ff0067dfecf978de9d5c83b2ff127a8fdefd2bfd54577f5ad8c80ed7d2c8b489ae01c82023545d010c4ecb87683fb403dd languageName: node linkType: hard "cssom@npm:~0.3.6": version: 0.3.8 resolution: "cssom@npm:0.3.8" - checksum: 49eacc88077555e419646c0ea84ddc73c97e3a346ad7cb95e22f9413a9722d8964b91d781ce21d378bd5ae058af9a745402383fa4e35e9cdfd19654b63f892a9 + checksum: 
10c0/d74017b209440822f9e24d8782d6d2e808a8fdd58fa626a783337222fe1c87a518ba944d4c88499031b4786e68772c99dfae616638d71906fe9f203aeaf14411 languageName: node linkType: hard @@ -6372,21 +6431,21 @@ __metadata: resolution: "cssstyle@npm:2.3.0" dependencies: cssom: "npm:~0.3.6" - checksum: 46f7f05a153446c4018b0454ee1464b50f606cb1803c90d203524834b7438eb52f3b173ba0891c618f380ced34ee12020675dc0052a7f1be755fe4ebc27ee977 + checksum: 10c0/863400da2a458f73272b9a55ba7ff05de40d850f22eb4f37311abebd7eff801cf1cd2fb04c4c92b8c3daed83fe766e52e4112afb7bc88d86c63a9c2256a7d178 languageName: node linkType: hard "csstype@npm:^3.0.2": version: 3.1.3 resolution: "csstype@npm:3.1.3" - checksum: f593cce41ff5ade23f44e77521e3a1bcc2c64107041e1bf6c3c32adc5187d0d60983292fda326154d20b01079e24931aa5b08e4467cc488b60bb1e7f6d478ade + checksum: 10c0/80c089d6f7e0c5b2bd83cf0539ab41474198579584fa10d86d0cafe0642202343cbc119e076a0b1aece191989477081415d66c9fefbf3c957fc2fc4b7009f248 languageName: node linkType: hard "damerau-levenshtein@npm:^1.0.8": version: 1.0.8 resolution: "damerau-levenshtein@npm:1.0.8" - checksum: f4eba1c90170f96be25d95fa3857141b5f81e254f7e4d530da929217b19990ea9a0390fc53d3c1cafac9152fda78e722ea4894f765cf6216be413b5af1fbf821 + checksum: 10c0/4c2647e0f42acaee7d068756c1d396e296c3556f9c8314bac1ac63ffb236217ef0e7e58602b18bb2173deec7ec8e0cac8e27cccf8f5526666b4ff11a13ad54a3 languageName: node linkType: hard @@ -6397,14 +6456,14 @@ __metadata: abab: "npm:^2.0.6" whatwg-mimetype: "npm:^3.0.0" whatwg-url: "npm:^11.0.0" - checksum: 033fc3dd0fba6d24bc9a024ddcf9923691dd24f90a3d26f6545d6a2f71ec6956f93462f2cdf2183cc46f10dc01ed3bcb36731a8208456eb1a08147e571fe2a76 + checksum: 10c0/051c3aaaf3e961904f136aab095fcf6dff4db23a7fc759dd8ba7b3e6ba03fc07ef608086caad8ab910d864bd3b5e57d0d2f544725653d77c96a2c971567045f4 languageName: node linkType: hard "date-fns@npm:3.3.1": version: 3.3.1 resolution: "date-fns@npm:3.3.1" - checksum: 
98231936765dfb6fc6897676319b500a06a39f051b2c3ecbdd541a07ce9b1344b770277b8bfb1049fb7a2f70bf365ac8e6f1e2bb452b10e1a8101d518ca7f95d + checksum: 10c0/e04ff79244010e03b912d791cd3250af5f18866ce868604958d76bd87e5fb0b79f0a810b8e7066248452b41779b288c4fd21de1cac2cd4b6d384e9dd931c9674 languageName: node linkType: hard @@ -6413,7 +6472,7 @@ __metadata: resolution: "debug@npm:2.6.9" dependencies: ms: "npm:2.0.0" - checksum: e07005f2b40e04f1bd14a3dd20520e9c4f25f60224cb006ce9d6781732c917964e9ec029fc7f1a151083cd929025ad5133814d4dc624a9aaf020effe4914ed14 + checksum: 10c0/121908fb839f7801180b69a7e218a40b5a0b718813b886b7d6bdb82001b931c938e2941d1e4450f33a1b1df1da653f5f7a0440c197f29fbf8a6e9d45ff6ef589 languageName: node linkType: hard @@ -6425,7 +6484,7 @@ __metadata: peerDependenciesMeta: supports-color: optional: true - checksum: 0073c3bcbd9cb7d71dd5f6b55be8701af42df3e56e911186dfa46fac3a5b9eb7ce7f377dd1d3be6db8977221f8eb333d945216f645cf56f6b688cd484837d255 + checksum: 10c0/cedbec45298dd5c501d01b92b119cd3faebe5438c3917ff11ae1bff86a6c722930ac9c8659792824013168ba6db7c4668225d845c633fbdafbbf902a6389f736 languageName: node linkType: hard @@ -6434,14 +6493,26 @@ __metadata: resolution: "debug@npm:3.2.7" dependencies: ms: "npm:^2.1.1" - checksum: d86fd7be2b85462297ea16f1934dc219335e802f629ca9a69b63ed8ed041dda492389bb2ee039217c02e5b54792b1c51aa96ae954cf28634d363a2360c7a1639 + checksum: 10c0/37d96ae42cbc71c14844d2ae3ba55adf462ec89fd3a999459dec3833944cd999af6007ff29c780f1c61153bcaaf2c842d1e4ce1ec621e4fc4923244942e4a02a + languageName: node + linkType: hard + +"debug@npm:~4.3.6": + version: 4.3.7 + resolution: "debug@npm:4.3.7" + dependencies: + ms: "npm:^2.1.3" + peerDependenciesMeta: + supports-color: + optional: true + checksum: 10c0/1471db19c3b06d485a622d62f65947a19a23fbd0dd73f7fd3eafb697eec5360cde447fb075919987899b1a2096e85d35d4eb5a4de09a57600ac9cf7e6c8e768b languageName: node linkType: hard "decimal.js@npm:^10.4.2": version: 10.4.3 resolution: "decimal.js@npm:10.4.3" - checksum: 
de663a7bc4d368e3877db95fcd5c87b965569b58d16cdc4258c063d231ca7118748738df17cd638f7e9dd0be8e34cec08d7234b20f1f2a756a52fc5a38b188d0 + checksum: 10c0/6d60206689ff0911f0ce968d40f163304a6c1bc739927758e6efc7921cfa630130388966f16bf6ef6b838cb33679fbe8e7a78a2f3c478afce841fd55ac8fb8ee languageName: node linkType: hard @@ -6450,7 +6521,7 @@ __metadata: resolution: "decode-named-character-reference@npm:1.0.2" dependencies: character-entities: "npm:^2.0.0" - checksum: f4c71d3b93105f20076052f9cb1523a22a9c796b8296cd35eef1ca54239c78d182c136a848b83ff8da2071e3ae2b1d300bf29d00650a6d6e675438cc31b11d78 + checksum: 10c0/66a9fc5d9b5385a2b3675c69ba0d8e893393d64057f7dbbb585265bb4fc05ec513d76943b8e5aac7d8016d20eea4499322cbf4cd6d54b466976b78f3a7587a4c languageName: node linkType: hard @@ -6462,7 +6533,7 @@ __metadata: peerDependenciesMeta: babel-plugin-macros: optional: true - checksum: fc00a8bc3dfb7c413a778dc40ee8151b6c6ff35159d641f36ecd839c1df5c6e0ec5f4992e658c82624a1a62aaecaffc23b9c965ceb0bbf4d698bfc16469ac27d + checksum: 10c0/f8612cd5b00aab58b18bb95572dca08dc2d49720bfa7201a444c3dae430291e8a06d4928614a6ec8764d713927f44bce9c990d3b8238fca2f430990ddc17c070 languageName: node linkType: hard @@ -6488,21 +6559,21 @@ __metadata: which-boxed-primitive: "npm:^1.0.2" which-collection: "npm:^1.0.1" which-typed-array: "npm:^1.1.13" - checksum: 1ce49d0b71d0f14d8ef991a742665eccd488dfc9b3cada069d4d7a86291e591c92d2589c832811dea182b4015736b210acaaebce6184be356c1060d176f5a05f + checksum: 10c0/a48244f90fa989f63ff5ef0cc6de1e4916b48ea0220a9c89a378561960814794a5800c600254482a2c8fd2e49d6c2e196131dc983976adb024c94a42dfe4949f languageName: node linkType: hard "deep-is@npm:^0.1.3": version: 0.1.4 resolution: "deep-is@npm:0.1.4" - checksum: ec12d074aef5ae5e81fa470b9317c313142c9e8e2afe3f8efa124db309720db96d1d222b82b84c834e5f87e7a614b44a4684b6683583118b87c833b3be40d4d8 + checksum: 10c0/7f0ee496e0dff14a573dc6127f14c95061b448b87b995fc96c017ce0a1e66af1675e73f1d6064407975bc4ea6ab679497a29fff7b5b9c4e99cb10797c1ad0b4c 
languageName: node linkType: hard "deepmerge@npm:^4.2.2": version: 4.3.1 resolution: "deepmerge@npm:4.3.1" - checksum: 058d9e1b0ff1a154468bf3837aea436abcfea1ba1d165ddaaf48ca93765fdd01a30d33c36173da8fbbed951dd0a267602bc782fe288b0fc4b7e1e7091afc4529 + checksum: 10c0/e53481aaf1aa2c4082b5342be6b6d8ad9dfe387bc92ce197a66dea08bd4265904a087e75e464f14d1347cf2ac8afe1e4c16b266e0561cc5df29382d3c5f80044 languageName: node linkType: hard @@ -6512,7 +6583,7 @@ __metadata: dependencies: bplist-parser: "npm:^0.2.0" untildify: "npm:^4.0.0" - checksum: 279c7ad492542e5556336b6c254a4eaf31b2c63a5433265655ae6e47301197b6cfb15c595a6fdc6463b2ff8e1a1a1ed3cba56038a60e1527ba4ab1628c6b9941 + checksum: 10c0/8db3ab882eb3e1e8b59d84c8641320e6c66d8eeb17eb4bb848b7dd549b1e6fd313988e4a13542e95fbaeff03f6e9dedc5ad191ad4df7996187753eb0d45c00b7 languageName: node linkType: hard @@ -6521,7 +6592,7 @@ __metadata: resolution: "defaults@npm:1.0.4" dependencies: clone: "npm:^1.0.2" - checksum: 3a88b7a587fc076b84e60affad8b85245c01f60f38fc1d259e7ac1d89eb9ce6abb19e27215de46b98568dd5bc48471730b327637e6f20b0f1bc85cf00440c80a + checksum: 10c0/9cfbe498f5c8ed733775db62dfd585780387d93c17477949e1670bfcfb9346e0281ce8c4bf9f4ac1fc0f9b851113bd6dc9e41182ea1644ccd97de639fa13c35a languageName: node linkType: hard @@ -6532,14 +6603,14 @@ __metadata: es-define-property: "npm:^1.0.0" es-errors: "npm:^1.3.0" gopd: "npm:^1.0.1" - checksum: abdcb2505d80a53524ba871273e5da75e77e52af9e15b3aa65d8aad82b8a3a424dad7aee2cc0b71470ac7acf501e08defac362e8b6a73cdb4309f028061df4ae + checksum: 10c0/dea0606d1483eb9db8d930d4eac62ca0fa16738b0b3e07046cddfacf7d8c868bbe13fa0cb263eb91c7d0d527960dc3f2f2471a69ed7816210307f6744fe62e37 languageName: node linkType: hard "define-lazy-prop@npm:^2.0.0": version: 2.0.0 resolution: "define-lazy-prop@npm:2.0.0" - checksum: 0115fdb065e0490918ba271d7339c42453d209d4cb619dfe635870d906731eff3e1ade8028bb461ea27ce8264ec5e22c6980612d332895977e89c1bbc80fcee2 + checksum: 
10c0/db6c63864a9d3b7dc9def55d52764968a5af296de87c1b2cc71d8be8142e445208071953649e0386a8cc37cfcf9a2067a47207f1eb9ff250c2a269658fdae422 languageName: node linkType: hard @@ -6550,14 +6621,14 @@ __metadata: define-data-property: "npm:^1.0.1" has-property-descriptors: "npm:^1.0.0" object-keys: "npm:^1.1.1" - checksum: b4ccd00597dd46cb2d4a379398f5b19fca84a16f3374e2249201992f36b30f6835949a9429669ee6b41b6e837205a163eadd745e472069e70dfc10f03e5fcc12 + checksum: 10c0/88a152319ffe1396ccc6ded510a3896e77efac7a1bfbaa174a7b00414a1747377e0bb525d303794a47cf30e805c2ec84e575758512c6e44a993076d29fd4e6c3 languageName: node linkType: hard "defu@npm:^6.1.3": version: 6.1.4 resolution: "defu@npm:6.1.4" - checksum: aeffdb47300f45b4fdef1c5bd3880ac18ea7a1fd5b8a8faf8df29350ff03bf16dd34f9800205cab513d476e4c0a3783aa0cff0a433aff0ac84a67ddc4c8a2d64 + checksum: 10c0/2d6cc366262dc0cb8096e429368e44052fdf43ed48e53ad84cc7c9407f890301aa5fcb80d0995abaaf842b3949f154d060be4160f7a46cb2bc2f7726c81526f5 languageName: node linkType: hard @@ -6573,56 +6644,56 @@ __metadata: p-map: "npm:^4.0.0" rimraf: "npm:^3.0.2" slash: "npm:^3.0.0" - checksum: 563288b73b8b19a7261c47fd21a330eeab6e2acd7c6208c49790dfd369127120dd7836cdf0c1eca216b77c94782a81507eac6b4734252d3bef2795cb366996b6 + checksum: 10c0/8a095c5ccade42c867a60252914ae485ec90da243d735d1f63ec1e64c1cfbc2b8810ad69a29ab6326d159d4fddaa2f5bad067808c42072351ec458efff86708f languageName: node linkType: hard "delayed-stream@npm:~1.0.0": version: 1.0.0 resolution: "delayed-stream@npm:1.0.0" - checksum: 46fe6e83e2cb1d85ba50bd52803c68be9bd953282fa7096f51fc29edd5d67ff84ff753c51966061e5ba7cb5e47ef6d36a91924eddb7f3f3483b1c560f77a0020 + checksum: 10c0/d758899da03392e6712f042bec80aa293bbe9e9ff1b2634baae6a360113e708b91326594c8a486d475c69d6259afb7efacdc3537bfcda1c6c648e390ce601b19 languageName: node linkType: hard "depd@npm:2.0.0": version: 2.0.0 resolution: "depd@npm:2.0.0" - checksum: 
c0c8ff36079ce5ada64f46cc9d6fd47ebcf38241105b6e0c98f412e8ad91f084bcf906ff644cc3a4bd876ca27a62accb8b0fff72ea6ed1a414b89d8506f4a5ca + checksum: 10c0/58bd06ec20e19529b06f7ad07ddab60e504d9e0faca4bd23079fac2d279c3594334d736508dc350e06e510aba5e22e4594483b3a6562ce7c17dd797f4cc4ad2c languageName: node linkType: hard "dequal@npm:^2.0.2, dequal@npm:^2.0.3": version: 2.0.3 resolution: "dequal@npm:2.0.3" - checksum: 6ff05a7561f33603df87c45e389c9ac0a95e3c056be3da1a0c4702149e3a7f6fe5ffbb294478687ba51a9e95f3a60e8b6b9005993acd79c292c7d15f71964b6b + checksum: 10c0/f98860cdf58b64991ae10205137c0e97d384c3a4edc7f807603887b7c4b850af1224a33d88012009f150861cbee4fa2d322c4cc04b9313bee312e47f6ecaa888 languageName: node linkType: hard "destroy@npm:1.2.0": version: 1.2.0 resolution: "destroy@npm:1.2.0" - checksum: 0acb300b7478a08b92d810ab229d5afe0d2f4399272045ab22affa0d99dbaf12637659411530a6fcd597a9bdac718fc94373a61a95b4651bbc7b83684a565e38 + checksum: 10c0/bd7633942f57418f5a3b80d5cb53898127bcf53e24cdf5d5f4396be471417671f0fee48a4ebe9a1e9defbde2a31280011af58a57e090ff822f589b443ed4e643 languageName: node linkType: hard "detect-indent@npm:^6.1.0": version: 6.1.0 resolution: "detect-indent@npm:6.1.0" - checksum: ab953a73c72dbd4e8fc68e4ed4bfd92c97eb6c43734af3900add963fd3a9316f3bc0578b018b24198d4c31a358571eff5f0656e81a1f3b9ad5c547d58b2d093d + checksum: 10c0/dd83cdeda9af219cf77f5e9a0dc31d828c045337386cfb55ce04fad94ba872ee7957336834154f7647b89b899c3c7acc977c57a79b7c776b506240993f97acc7 languageName: node linkType: hard "detect-newline@npm:^3.0.0": version: 3.1.0 resolution: "detect-newline@npm:3.1.0" - checksum: ae6cd429c41ad01b164c59ea36f264a2c479598e61cba7c99da24175a7ab80ddf066420f2bec9a1c57a6bead411b4655ff15ad7d281c000a89791f48cbe939e7 + checksum: 10c0/c38cfc8eeb9fda09febb44bcd85e467c970d4e3bf526095394e5a4f18bc26dd0cf6b22c69c1fa9969261521c593836db335c2795218f6d781a512aea2fb8209d languageName: node linkType: hard "detect-node-es@npm:^1.1.0": version: 1.1.0 resolution: "detect-node-es@npm:1.1.0" - 
checksum: e46307d7264644975b71c104b9f028ed1d3d34b83a15b8a22373640ce5ea630e5640b1078b8ea15f202b54641da71e4aa7597093bd4b91f113db520a26a37449 + checksum: 10c0/e562f00de23f10c27d7119e1af0e7388407eb4b06596a25f6d79a360094a109ff285de317f02b090faae093d314cf6e73ac3214f8a5bb3a0def5bece94557fbe languageName: node linkType: hard @@ -6631,7 +6702,7 @@ __metadata: resolution: "detect-package-manager@npm:2.0.1" dependencies: execa: "npm:^5.1.1" - checksum: e72b910182d5ad479198d4235be206ac64a479257b32201bb06f3c842cc34c65ea851d46f72cc1d4bf535bcc6c4b44b5b86bb29fe1192b8c9c07b46883672f28 + checksum: 10c0/56ffd65228d1ff3ead5ea7f8ab951a517a29270de27510b790c9a8b77d4f36efbd61493e170ca77ee3dc13cbb5218583ce65b78ad14a59dc48565c9bcbbf3c71 languageName: node linkType: hard @@ -6644,14 +6715,14 @@ __metadata: bin: detect: bin/detect-port.js detect-port: bin/detect-port.js - checksum: b48da9340481742547263d5d985e65d078592557863402ecf538511735e83575867e94f91fe74405ea19b61351feb99efccae7e55de9a151d5654e3417cea05b + checksum: 10c0/f2b204ad3a9f8e8b53fea35fcc97469f31a8e3e786a2f59fbc886397e33b5f130c5f964bf001b9a64d990047c3824f6a439308461ff19801df04ab48a754639e languageName: node linkType: hard "diff-sequences@npm:^29.6.3": version: 29.6.3 resolution: "diff-sequences@npm:29.6.3" - checksum: 179daf9d2f9af5c57ad66d97cb902a538bcf8ed64963fa7aa0c329b3de3665ce2eb6ffdc2f69f29d445fa4af2517e5e55e5b6e00c00a9ae4f43645f97f7078cb + checksum: 10c0/32e27ac7dbffdf2fb0eb5a84efd98a9ad084fbabd5ac9abb8757c6770d5320d2acd172830b28c4add29bb873d59420601dfc805ac4064330ce59b1adfd0593b2 languageName: node linkType: hard @@ -6660,7 +6731,7 @@ __metadata: resolution: "dir-glob@npm:3.0.1" dependencies: path-type: "npm:^4.0.0" - checksum: fa05e18324510d7283f55862f3161c6759a3f2f8dbce491a2fc14c8324c498286c54282c1f0e933cb930da8419b30679389499b919122952a4f8592362ef4615 + checksum: 10c0/dcac00920a4d503e38bb64001acb19df4efc14536ada475725e12f52c16777afdee4db827f55f13a908ee7efc0cb282e2e3dbaeeb98c0993dd93d1802d3bf00c languageName: node 
linkType: hard @@ -6669,7 +6740,7 @@ __metadata: resolution: "doctrine@npm:2.1.0" dependencies: esutils: "npm:^2.0.2" - checksum: 555684f77e791b17173ea86e2eea45ef26c22219cb64670669c4f4bebd26dbc95cd90ec1f4159e9349a6bb9eb892ce4dde8cd0139e77bedd8bf4518238618474 + checksum: 10c0/b6416aaff1f380bf56c3b552f31fdf7a69b45689368deca72d28636f41c16bb28ec3ebc40ace97db4c1afc0ceeb8120e8492fe0046841c94c2933b2e30a7d5ac languageName: node linkType: hard @@ -6678,21 +6749,21 @@ __metadata: resolution: "doctrine@npm:3.0.0" dependencies: esutils: "npm:^2.0.2" - checksum: b4b28f1df5c563f7d876e7461254a4597b8cabe915abe94d7c5d1633fed263fcf9a85e8d3836591fc2d040108e822b0d32758e5ec1fe31c590dc7e08086e3e48 + checksum: 10c0/c96bdccabe9d62ab6fea9399fdff04a66e6563c1d6fb3a3a063e8d53c3bb136ba63e84250bbf63d00086a769ad53aef92d2bd483f03f837fc97b71cbee6b2520 languageName: node linkType: hard "dom-accessibility-api@npm:^0.5.9": version: 0.5.16 resolution: "dom-accessibility-api@npm:0.5.16" - checksum: 377b4a7f9eae0a5d72e1068c369c99e0e4ca17fdfd5219f3abd32a73a590749a267475a59d7b03a891f9b673c27429133a818c44b2e47e32fec024b34274e2ca + checksum: 10c0/b2c2eda4fae568977cdac27a9f0c001edf4f95a6a6191dfa611e3721db2478d1badc01db5bb4fa8a848aeee13e442a6c2a4386d65ec65a1436f24715a2f8d053 languageName: node linkType: hard "dom-accessibility-api@npm:^0.6.3": version: 0.6.3 resolution: "dom-accessibility-api@npm:0.6.3" - checksum: 83d3371f8226487fbad36e160d44f1d9017fb26d46faba6a06fcad15f34633fc827b8c3e99d49f71d5f3253d866e2131826866fd0a3c86626f8eccfc361881ff + checksum: 10c0/10bee5aa514b2a9a37c87cd81268db607a2e933a050074abc2f6fa3da9080ebed206a320cbc123567f2c3087d22292853bdfdceaffdd4334ffe2af9510b29360 languageName: node linkType: hard @@ -6703,7 +6774,7 @@ __metadata: domelementtype: "npm:^2.0.1" domhandler: "npm:^4.2.0" entities: "npm:^2.0.0" - checksum: 53b217bcfed4a0f90dd47f34f239b1c81fff53ffa39d164d722325817fdb554903b145c2d12c8421ce0df7d31c1b180caf7eacd3c86391dd925f803df8027dcc + checksum: 
10c0/67d775fa1ea3de52035c98168ddcd59418356943b5eccb80e3c8b3da53adb8e37edb2cc2f885802b7b1765bf5022aec21dfc32910d7f9e6de4c3148f095ab5e0 languageName: node linkType: hard @@ -6714,14 +6785,14 @@ __metadata: domelementtype: "npm:^2.3.0" domhandler: "npm:^5.0.2" entities: "npm:^4.2.0" - checksum: e3bf9027a64450bca0a72297ecdc1e3abb7a2912268a9f3f5d33a2e29c1e2c3502c6e9f860fc6625940bfe0cfb57a44953262b9e94df76872fdfb8151097eeb3 + checksum: 10c0/d5ae2b7110ca3746b3643d3ef60ef823f5f078667baf530cec096433f1627ec4b6fa8c072f09d079d7cda915fd2c7bc1b7b935681e9b09e591e1e15f4040b8e2 languageName: node linkType: hard "domelementtype@npm:^2.0.1, domelementtype@npm:^2.2.0, domelementtype@npm:^2.3.0": version: 2.3.0 resolution: "domelementtype@npm:2.3.0" - checksum: ee837a318ff702622f383409d1f5b25dd1024b692ef64d3096ff702e26339f8e345820f29a68bcdcea8cfee3531776b3382651232fbeae95612d6f0a75efb4f6 + checksum: 10c0/686f5a9ef0fff078c1412c05db73a0dce096190036f33e400a07e2a4518e9f56b1e324f5c576a0a747ef0e75b5d985c040b0d51945ce780c0dd3c625a18cd8c9 languageName: node linkType: hard @@ -6730,7 +6801,7 @@ __metadata: resolution: "domexception@npm:4.0.0" dependencies: webidl-conversions: "npm:^7.0.0" - checksum: 4ed443227d2871d76c58d852b2e93c68e0443815b2741348f20881bedee8c1ad4f9bfc5d30c7dec433cd026b57da63407c010260b1682fef4c8847e7181ea43f + checksum: 10c0/774277cd9d4df033f852196e3c0077a34dbd15a96baa4d166e0e47138a80f4c0bdf0d94e4703e6ff5883cec56bb821a6fff84402d8a498e31de7c87eb932a294 languageName: node linkType: hard @@ -6739,7 +6810,7 @@ __metadata: resolution: "domhandler@npm:5.0.3" dependencies: domelementtype: "npm:^2.3.0" - checksum: 809b805a50a9c6884a29f38aec0a4e1b4537f40e1c861950ed47d10b049febe6b79ab72adaeeebb3cc8fc1cd33f34e97048a72a9265103426d93efafa78d3e96 + checksum: 10c0/bba1e5932b3e196ad6862286d76adc89a0dbf0c773e5ced1eb01f9af930c50093a084eff14b8de5ea60b895c56a04d5de8bbc4930c5543d029091916770b2d2a languageName: node linkType: hard @@ -6748,7 +6819,7 @@ __metadata: resolution: 
"domhandler@npm:4.3.1" dependencies: domelementtype: "npm:^2.2.0" - checksum: e0d2af7403997a3ca040a9ace4a233b75ebe321e0ef628b417e46d619d65d47781b2f2038b6c2ef6e56e73e66aec99caf6a12c7e687ecff18ef74af6dfbde5de + checksum: 10c0/5c199c7468cb052a8b5ab80b13528f0db3d794c64fc050ba793b574e158e67c93f8336e87fd81e9d5ee43b0e04aea4d8b93ed7be4899cb726a1601b3ba18538b languageName: node linkType: hard @@ -6759,7 +6830,7 @@ __metadata: dom-serializer: "npm:^1.0.1" domelementtype: "npm:^2.2.0" domhandler: "npm:^4.2.0" - checksum: 1f316a03f00b09a8893d4a25d297d5cbffd02c564509dede28ef72d5ce38d93f6d61f1de88d439f31b14a1d9b42f587ed711b9e8b1b4d3bf6001399832bfc4e0 + checksum: 10c0/d58e2ae01922f0dd55894e61d18119924d88091837887bf1438f2327f32c65eb76426bd9384f81e7d6dcfb048e0f83c19b222ad7101176ad68cdc9c695b563db languageName: node linkType: hard @@ -6770,21 +6841,21 @@ __metadata: dom-serializer: "npm:^2.0.0" domelementtype: "npm:^2.3.0" domhandler: "npm:^5.0.3" - checksum: 9a169a6e57ac4c738269a73ab4caf785114ed70e46254139c1bbc8144ac3102aacb28a6149508395ae34aa5d6a40081f4fa5313855dc8319c6d8359866b6dfea + checksum: 10c0/342d64cf4d07b8a0573fb51e0a6312a88fb520c7fefd751870bf72fa5fc0f2e0cb9a3958a573610b1d608c6e2a69b8e9b4b40f0bfb8f87a71bce4f180cca1887 languageName: node linkType: hard "dotenv-expand@npm:^10.0.0": version: 10.0.0 resolution: "dotenv-expand@npm:10.0.0" - checksum: b41eb278bc96b92cbf3037ca5f3d21e8845bf165dc06b6f9a0a03d278c2bd5a01c0cfbb3528ae3a60301ba1a8a9cace30e748c54b460753bc00d4c014b675597 + checksum: 10c0/298f5018e29cfdcb0b5f463ba8e8627749103fbcf6cf81c561119115754ed582deee37b49dfc7253028aaba875ab7aea5fa90e5dac88e511d009ab0e6677924e languageName: node linkType: hard "dotenv@npm:^16.0.0": version: 16.4.5 resolution: "dotenv@npm:16.4.5" - checksum: 55a3134601115194ae0f924e54473459ed0d9fc340ae610b676e248cca45aa7c680d86365318ea964e6da4e2ea80c4514c1adab5adb43d6867fb57ff068f95c8 + checksum: 
10c0/48d92870076832af0418b13acd6e5a5a3e83bb00df690d9812e94b24aff62b88ade955ac99a05501305b8dc8f1b0ee7638b18493deb6fe93d680e5220936292f languageName: node linkType: hard @@ -6796,21 +6867,21 @@ __metadata: inherits: "npm:^2.0.1" readable-stream: "npm:^2.0.0" stream-shift: "npm:^1.0.0" - checksum: 7799984d178fb57e11c43f5f172a10f795322ec85ff664c2a98d2c2de6deeb9d7a30b810f83923dcd7ebe0f1786724b8aee2b62ca4577522141f93d6d48fb31c + checksum: 10c0/59d1440c1b4e3a4db35ae96933392703ce83518db1828d06b9b6322920d6cbbf0b7159e88be120385fe459e77f1eb0c7622f26e9ec1f47c9ff05c2b35747dbd3 languageName: node linkType: hard "eastasianwidth@npm:^0.2.0": version: 0.2.0 resolution: "eastasianwidth@npm:0.2.0" - checksum: 9b1d3e1baefeaf7d70799db8774149cef33b97183a6addceeba0cf6b85ba23ee2686f302f14482006df32df75d32b17c509c143a3689627929e4a8efaf483952 + checksum: 10c0/26f364ebcdb6395f95124fda411f63137a4bfb5d3a06453f7f23dfe52502905bd84e0488172e0f9ec295fdc45f05c23d5d91baf16bd26f0fe9acd777a188dc39 languageName: node linkType: hard "ee-first@npm:1.1.1": version: 1.1.1 resolution: "ee-first@npm:1.1.1" - checksum: 1b4cac778d64ce3b582a7e26b218afe07e207a0f9bfe13cc7395a6d307849cfe361e65033c3251e00c27dd060cab43014c2d6b2647676135e18b77d2d05b3f4f + checksum: 10c0/b5bb125ee93161bc16bfe6e56c6b04de5ad2aa44234d8f644813cc95d861a6910903132b05093706de2b706599367c4130eb6d170f6b46895686b95f87d017b7 languageName: node linkType: hard @@ -6821,49 +6892,56 @@ __metadata: jake: "npm:^10.8.5" bin: ejs: bin/cli.js - checksum: 71f56d37540d2c2d71701f0116710c676f75314a3e997ef8b83515d5d4d2b111c5a72725377caeecb928671bacb84a0d38135f345904812e989847057d59f21a + checksum: 10c0/f0e249c79128810f5f6d5cbf347fc906d86bb9384263db0b2a9004aea649f2bc2d112736de5716c509c80afb4721c47281bd5b57c757d3b63f1bf5ac5f885893 languageName: node linkType: hard "electron-to-chromium@npm:^1.4.668": version: 1.4.685 resolution: "electron-to-chromium@npm:1.4.685" - checksum: 
8067c77260c94dee9117da963cc2d4cab13854ce6da97b310cc2c23da3dbde82fd94b5c3c352e86ca522b8491bf9cc621d61d8b58ff94a46f7c0a60dc64c4fcf + checksum: 10c0/c9ce3907164c68aafe3667e9334dd41dadafdbbcb7dc6f6a53db4cafcafa107b664f54769c4ecb819b712695abf192cacde2d436c366cea1f75340cab05a5cb4 languageName: node linkType: hard "emittery@npm:^0.13.1": version: 0.13.1 resolution: "emittery@npm:0.13.1" - checksum: fbe214171d878b924eedf1757badf58a5dce071cd1fa7f620fa841a0901a80d6da47ff05929d53163105e621ce11a71b9d8acb1148ffe1745e045145f6e69521 + checksum: 10c0/1573d0ae29ab34661b6c63251ff8f5facd24ccf6a823f19417ae8ba8c88ea450325788c67f16c99edec8de4b52ce93a10fe441ece389fd156e88ee7dab9bfa35 + languageName: node + linkType: hard + +"emoji-regex@npm:^10.3.0": + version: 10.4.0 + resolution: "emoji-regex@npm:10.4.0" + checksum: 10c0/a3fcedfc58bfcce21a05a5f36a529d81e88d602100145fcca3dc6f795e3c8acc4fc18fe773fbf9b6d6e9371205edb3afa2668ec3473fa2aa7fd47d2a9d46482d languageName: node linkType: hard "emoji-regex@npm:^8.0.0": version: 8.0.0 resolution: "emoji-regex@npm:8.0.0" - checksum: c72d67a6821be15ec11997877c437491c313d924306b8da5d87d2a2bcc2cec9903cb5b04ee1a088460501d8e5b44f10df82fdc93c444101a7610b80c8b6938e1 + checksum: 10c0/b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 languageName: node linkType: hard "emoji-regex@npm:^9.2.2": version: 9.2.2 resolution: "emoji-regex@npm:9.2.2" - checksum: 915acf859cea7131dac1b2b5c9c8e35c4849e325a1d114c30adb8cd615970f6dca0e27f64f3a4949d7d6ed86ecd79a1c5c63f02e697513cddd7b5835c90948b8 + checksum: 10c0/af014e759a72064cf66e6e694a7fc6b0ed3d8db680427b021a89727689671cefe9d04151b2cad51dbaf85d5ba790d061cd167f1cf32eb7b281f6368b3c181639 languageName: node linkType: hard "enabled@npm:2.0.x": version: 2.0.0 resolution: "enabled@npm:2.0.0" - checksum: 9d256d89f4e8a46ff988c6a79b22fa814b4ffd82826c4fdacd9b42e9b9465709d3b748866d0ab4d442dfc6002d81de7f7b384146ccd1681f6a7f868d2acca063 + checksum: 
10c0/3b2c2af9bc7f8b9e291610f2dde4a75cf6ee52a68f4dd585482fbdf9a55d65388940e024e56d40bb03e05ef6671f5f53021fa8b72a20e954d7066ec28166713f languageName: node linkType: hard "encodeurl@npm:~1.0.2": version: 1.0.2 resolution: "encodeurl@npm:1.0.2" - checksum: e50e3d508cdd9c4565ba72d2012e65038e5d71bdc9198cb125beb6237b5b1ade6c0d343998da9e170fb2eae52c1bed37d4d6d98a46ea423a0cddbed5ac3f780c + checksum: 10c0/f6c2387379a9e7c1156c1c3d4f9cb7bb11cf16dd4c1682e1f6746512564b053df5781029b6061296832b59fb22f459dbe250386d217c2f6e203601abb2ee0bec languageName: node linkType: hard @@ -6872,7 +6950,7 @@ __metadata: resolution: "encoding@npm:0.1.13" dependencies: iconv-lite: "npm:^0.6.2" - checksum: bb98632f8ffa823996e508ce6a58ffcf5856330fde839ae42c9e1f436cc3b5cc651d4aeae72222916545428e54fd0f6aa8862fd8d25bdbcc4589f1e3f3715e7f + checksum: 10c0/36d938712ff00fe1f4bac88b43bcffb5930c1efa57bbcdca9d67e1d9d6c57cfb1200fb01efe0f3109b2ce99b231f90779532814a81370a1bd3274a0f58585039 languageName: node linkType: hard @@ -6881,28 +6959,28 @@ __metadata: resolution: "end-of-stream@npm:1.4.4" dependencies: once: "npm:^1.4.0" - checksum: 530a5a5a1e517e962854a31693dbb5c0b2fc40b46dad2a56a2deec656ca040631124f4795823acc68238147805f8b021abbe221f4afed5ef3c8e8efc2024908b + checksum: 10c0/870b423afb2d54bb8d243c63e07c170409d41e20b47eeef0727547aea5740bd6717aca45597a9f2745525667a6b804c1e7bede41f856818faee5806dd9ff3975 languageName: node linkType: hard "entities@npm:^2.0.0": version: 2.2.0 resolution: "entities@npm:2.2.0" - checksum: 2c765221ee324dbe25e1b8ca5d1bf2a4d39e750548f2e85cbf7ca1d167d709689ddf1796623e66666ae747364c11ed512c03b48c5bbe70968d30f2a4009509b7 + checksum: 10c0/7fba6af1f116300d2ba1c5673fc218af1961b20908638391b4e1e6d5850314ee2ac3ec22d741b3a8060479911c99305164aed19b6254bde75e7e6b1b2c3f3aa3 languageName: node linkType: hard "entities@npm:^4.2.0, entities@npm:^4.4.0, entities@npm:^4.5.0": version: 4.5.0 resolution: "entities@npm:4.5.0" - checksum: 
ede2a35c9bce1aeccd055a1b445d41c75a14a2bb1cd22e242f20cf04d236cdcd7f9c859eb83f76885327bfae0c25bf03303665ee1ce3d47c5927b98b0e3e3d48 + checksum: 10c0/5b039739f7621f5d1ad996715e53d964035f75ad3b9a4d38c6b3804bb226e282ffeae2443624d8fdd9c47d8e926ae9ac009c54671243f0c3294c26af7cc85250 languageName: node linkType: hard "env-paths@npm:^2.2.0": version: 2.2.1 resolution: "env-paths@npm:2.2.1" - checksum: 65b5df55a8bab92229ab2b40dad3b387fad24613263d103a97f91c9fe43ceb21965cd3392b1ccb5d77088021e525c4e0481adb309625d0cb94ade1d1fb8dc17e + checksum: 10c0/285325677bf00e30845e330eec32894f5105529db97496ee3f598478e50f008c5352a41a30e5e72ec9de8a542b5a570b85699cd63bd2bc646dbcb9f311d83bc4 languageName: node linkType: hard @@ -6911,14 +6989,21 @@ __metadata: resolution: "envinfo@npm:7.11.1" bin: envinfo: dist/cli.js - checksum: 5a18ead05954ac1643350170fefce2436a9cb758dc402e36fe4616553ee46469f766fcb6df72379d1741a2e5b55918949b343ff6174502c31c524a5cf75f05cd + checksum: 10c0/4550cce03d4d8a7b137d548faaf9c920356474231636cb4a6e74ae75db3b9cb04aa0a052ee391e2363af5db697166c207ba76e106338d758c6126830b3e16d75 + languageName: node + linkType: hard + +"environment@npm:^1.0.0": + version: 1.1.0 + resolution: "environment@npm:1.1.0" + checksum: 10c0/fb26434b0b581ab397039e51ff3c92b34924a98b2039dcb47e41b7bca577b9dbf134a8eadb364415c74464b682e2d3afe1a4c0eb9873dc44ea814c5d3103331d languageName: node linkType: hard "err-code@npm:^2.0.2": version: 2.0.3 resolution: "err-code@npm:2.0.3" - checksum: 1d20d825cdcce8d811bfbe86340f4755c02655a7feb2f13f8c880566d9d72a3f6c92c192a6867632e490d6da67b678271f46e01044996a6443e870331100dfdd + checksum: 10c0/b642f7b4dd4a376e954947550a3065a9ece6733ab8e51ad80db727aaae0817c2e99b02a97a3d6cecc648a97848305e728289cf312d09af395403a90c9d4d8a66 languageName: node linkType: hard @@ -6927,7 +7012,7 @@ __metadata: resolution: "error-ex@npm:1.3.2" dependencies: is-arrayish: "npm:^0.2.1" - checksum: 
d547740aa29c34e753fb6fed2c5de81802438529c12b3673bd37b6bb1fe49b9b7abdc3c11e6062fe625d8a296b3cf769a80f878865e25e685f787763eede3ffb + checksum: 10c0/ba827f89369b4c93382cfca5a264d059dfefdaa56ecc5e338ffa58a6471f5ed93b71a20add1d52290a4873d92381174382658c885ac1a2305f7baca363ce9cce languageName: node linkType: hard @@ -6976,14 +7061,14 @@ __metadata: typed-array-length: "npm:^1.0.4" unbox-primitive: "npm:^1.0.2" which-typed-array: "npm:^1.1.14" - checksum: 062e562a000e280c0c0683ad4a7b81732f97463bc769110c668a8edb739cd5df56975fa55965f5304a3256fd6eee03b9b66a47d863076f8976c2050731946b1f + checksum: 10c0/dc332c3a010c5e7b77b7ea8a4532ac455fa02e7bcabf996a47447165bafa72d0d99967407d0cf5dbbb5fbbf87f53cd8b706608ec70953523b8cd2b831b9a9d64 languageName: node linkType: hard "es-array-method-boxes-properly@npm:^1.0.0": version: 1.0.0 resolution: "es-array-method-boxes-properly@npm:1.0.0" - checksum: 27a8a21acf20f3f51f69dce8e643f151e380bffe569e95dc933b9ded9fcd89a765ee21b5229c93f9206c93f87395c6b75f80be8ac8c08a7ceb8771e1822ff1fb + checksum: 10c0/4b7617d3fbd460d6f051f684ceca6cf7e88e6724671d9480388d3ecdd72119ddaa46ca31f2c69c5426a82e4b3091c1e81867c71dcdc453565cd90005ff2c382d languageName: node linkType: hard @@ -6992,14 +7077,14 @@ __metadata: resolution: "es-define-property@npm:1.0.0" dependencies: get-intrinsic: "npm:^1.2.4" - checksum: f66ece0a887b6dca71848fa71f70461357c0e4e7249696f81bad0a1f347eed7b31262af4a29f5d726dc026426f085483b6b90301855e647aa8e21936f07293c6 + checksum: 10c0/6bf3191feb7ea2ebda48b577f69bdfac7a2b3c9bcf97307f55fd6ef1bbca0b49f0c219a935aca506c993d8c5d8bddd937766cb760cd5e5a1071351f2df9f9aa4 languageName: node linkType: hard "es-errors@npm:^1.0.0, es-errors@npm:^1.1.0, es-errors@npm:^1.2.1, es-errors@npm:^1.3.0": version: 1.3.0 resolution: "es-errors@npm:1.3.0" - checksum: 96e65d640156f91b707517e8cdc454dd7d47c32833aa3e85d79f24f9eb7ea85f39b63e36216ef0114996581969b59fe609a94e30316b08f5f4df1d44134cf8d5 + checksum: 
10c0/0a61325670072f98d8ae3b914edab3559b6caa980f08054a3b872052640d91da01d38df55df797fcc916389d77fc92b8d5906cf028f4db46d7e3003abecbca85 languageName: node linkType: hard @@ -7016,7 +7101,7 @@ __metadata: is-string: "npm:^1.0.7" isarray: "npm:^2.0.5" stop-iteration-iterator: "npm:^1.0.0" - checksum: bc2194befbe55725f9489098626479deee3c801eda7e83ce0dff2eb266a28dc808edb9b623ff01d31ebc1328f09d661333d86b601036692c2e3c1a6942319433 + checksum: 10c0/ebd11effa79851ea75d7f079405f9d0dc185559fd65d986c6afea59a0ff2d46c2ed8675f19f03dce7429d7f6c14ff9aede8d121fbab78d75cfda6a263030bac0 languageName: node linkType: hard @@ -7039,14 +7124,14 @@ __metadata: internal-slot: "npm:^1.0.7" iterator.prototype: "npm:^1.1.2" safe-array-concat: "npm:^1.1.0" - checksum: 42c6eb65368d34b556dac1cc8d34ba753eb526bc7d4594be3b77799440be78d31fddfd60717af2d9ce6d021de8346e7a573141d789821e38836e60441f93ccfd + checksum: 10c0/d0f281257e7165f068fd4fc3beb63d07ae4f18fbef02a2bbe4a39272b764164c1ce3311ae7c5429ac30003aef290fcdf569050e4a9ba3560e044440f68e9a47c languageName: node linkType: hard "es-module-lexer@npm:^0.9.3": version: 0.9.3 resolution: "es-module-lexer@npm:0.9.3" - checksum: c3e39465d06a6ecd103ccdb746508c88ee4bdd56c15238b0013de38b949a4eca91d5e44d2a9b88d772fe7821547c5fe9200ba0f3353116e208d44bb50c7bc1ea + checksum: 10c0/be77d73aee709fdc68d22b9938da81dfee3bc45e8d601629258643fe5bfdab253d6e2540035e035cfa8cf52a96366c1c19b46bcc23b4507b1d44e5907d2e7f6c languageName: node linkType: hard @@ -7057,7 +7142,7 @@ __metadata: get-intrinsic: "npm:^1.2.4" has-tostringtag: "npm:^1.0.2" hasown: "npm:^2.0.1" - checksum: 7227fa48a41c0ce83e0377b11130d324ac797390688135b8da5c28994c0165be8b252e15cd1de41e1325e5a5412511586960213e88f9ab4a5e7d028895db5129 + checksum: 10c0/f22aff1585eb33569c326323f0b0d175844a1f11618b86e193b386f8be0ea9474cfbe46df39c45d959f7aa8f6c06985dc51dd6bce5401645ec5a74c4ceaa836a languageName: node linkType: hard @@ -7066,7 +7151,7 @@ __metadata: resolution: "es-shim-unscopables@npm:1.0.2" dependencies: hasown: 
"npm:^2.0.0" - checksum: 6d3bf91f658a27cc7217cd32b407a0d714393a84d125ad576319b9e83a893bea165cf41270c29e9ceaa56d3cf41608945d7e2a2c31fd51c0009b0c31402b91c7 + checksum: 10c0/f495af7b4b7601a4c0cfb893581c352636e5c08654d129590386a33a0432cf13a7bdc7b6493801cadd990d838e2839b9013d1de3b880440cb537825e834fe783 languageName: node linkType: hard @@ -7077,14 +7162,14 @@ __metadata: is-callable: "npm:^1.1.4" is-date-object: "npm:^1.0.1" is-symbol: "npm:^1.0.2" - checksum: 74aeeefe2714cf99bb40cab7ce3012d74e1e2c1bd60d0a913b467b269edde6e176ca644b5ba03a5b865fb044a29bca05671cd445c85ca2cdc2de155d7fc8fe9b + checksum: 10c0/0886572b8dc075cb10e50c0af62a03d03a68e1e69c388bd4f10c0649ee41b1fbb24840a1b7e590b393011b5cdbe0144b776da316762653685432df37d6de60f1 languageName: node linkType: hard "esbuild-plugin-alias@npm:^0.2.1": version: 0.2.1 resolution: "esbuild-plugin-alias@npm:0.2.1" - checksum: afe2d2c8b5f09d5321cb8d9c0825e8a9f6e03c2d50df92f953a291d4620cc29eddb3da9e33b238f6d8f77738e0277bdcb831f127399449fecf78fb84c04e5da9 + checksum: 10c0/a67bc6bc2744fc8637f7321f00c1f00e4fae86c182662421738ebfabf3ad344967b9c667185c6c34d9edd5b289807d34bfdceef94620e94e0a45683534af69e0 languageName: node linkType: hard @@ -7095,7 +7180,7 @@ __metadata: debug: "npm:^4.3.4" peerDependencies: esbuild: ">=0.12 <1" - checksum: af6874ce9b5fcdb0974c9d9e9f16530a5b9bd80c699b2ba9d7ace33439c1af1be6948535c775d9a6439e2bf23fb31cfd54ac882cfa38308a3f182039f4b98a01 + checksum: 10c0/9ccd0573cb66018e4cce3c1416eed0f5f3794c7026ce469a94e2f8761335abed8e363fc8e8bb036ab9ad7e579bb4296b8568a04ae5626596c123576b0d9c9bde languageName: node linkType: hard @@ -7172,42 +7257,42 @@ __metadata: optional: true bin: esbuild: bin/esbuild - checksum: 1f723ec71c3aa196473bf3298316eedc3f62d523924652dfeb60701b609792f918fc60db84b420d1d8ba9bfa7d69de2fc1d3157ba47c028bdae5d507a26a3c64 + checksum: 10c0/473b1d92842f50a303cf948a11ebd5f69581cd254d599dd9d62f9989858e0533f64e83b723b5e1398a5b488c0f5fd088795b4235f65ecaf4f007d4b79f04bc88 languageName: node linkType: hard 
"escalade@npm:^3.1.1": version: 3.1.2 resolution: "escalade@npm:3.1.2" - checksum: a1e07fea2f15663c30e40b9193d658397846ffe28ce0a3e4da0d8e485fedfeca228ab846aee101a05015829adf39f9934ff45b2a3fca47bed37a29646bd05cd3 + checksum: 10c0/6b4adafecd0682f3aa1cd1106b8fff30e492c7015b178bc81b2d2f75106dabea6c6d6e8508fc491bd58e597c74abb0e8e2368f943ecb9393d4162e3c2f3cf287 languageName: node linkType: hard "escape-html@npm:~1.0.3": version: 1.0.3 resolution: "escape-html@npm:1.0.3" - checksum: 6213ca9ae00d0ab8bccb6d8d4e0a98e76237b2410302cf7df70aaa6591d509a2a37ce8998008cbecae8fc8ffaadf3fb0229535e6a145f3ce0b211d060decbb24 + checksum: 10c0/524c739d776b36c3d29fa08a22e03e8824e3b2fd57500e5e44ecf3cc4707c34c60f9ca0781c0e33d191f2991161504c295e98f68c78fe7baa6e57081ec6ac0a3 languageName: node linkType: hard "escape-string-regexp@npm:^1.0.5": version: 1.0.5 resolution: "escape-string-regexp@npm:1.0.5" - checksum: 6092fda75c63b110c706b6a9bfde8a612ad595b628f0bd2147eea1d3406723020810e591effc7db1da91d80a71a737a313567c5abb3813e8d9c71f4aa595b410 + checksum: 10c0/a968ad453dd0c2724e14a4f20e177aaf32bb384ab41b674a8454afe9a41c5e6fe8903323e0a1052f56289d04bd600f81278edf140b0fcc02f5cac98d0f5b5371 languageName: node linkType: hard "escape-string-regexp@npm:^2.0.0": version: 2.0.0 resolution: "escape-string-regexp@npm:2.0.0" - checksum: 9f8a2d5743677c16e85c810e3024d54f0c8dea6424fad3c79ef6666e81dd0846f7437f5e729dfcdac8981bc9e5294c39b4580814d114076b8d36318f46ae4395 + checksum: 10c0/2530479fe8db57eace5e8646c9c2a9c80fa279614986d16dcc6bcaceb63ae77f05a851ba6c43756d816c61d7f4534baf56e3c705e3e0d884818a46808811c507 languageName: node linkType: hard "escape-string-regexp@npm:^4.0.0": version: 4.0.0 resolution: "escape-string-regexp@npm:4.0.0" - checksum: 98b48897d93060f2322108bf29db0feba7dd774be96cd069458d1453347b25ce8682ecc39859d4bca2203cc0ab19c237bcc71755eff49a0f8d90beadeeba5cc5 + checksum: 
10c0/9497d4dd307d845bd7f75180d8188bb17ea8c151c1edbf6b6717c100e104d629dc2dfb687686181b0f4b7d732c7dfdc4d5e7a8ff72de1b0ca283a75bbb3a9cd9 languageName: node linkType: hard @@ -7225,7 +7310,7 @@ __metadata: bin: escodegen: bin/escodegen.js esgenerate: bin/esgenerate.js - checksum: 47719a65b2888b4586e3fa93769068b275961c13089e90d5d01a96a6e8e95871b1c3893576814c8fbf08a4a31a496f37e7b2c937cf231270f4d81de012832c7c + checksum: 10c0/e1450a1f75f67d35c061bf0d60888b15f62ab63aef9df1901cffc81cffbbb9e8b3de237c5502cf8613a017c1df3a3003881307c78835a1ab54d8c8d2206e01d3 languageName: node linkType: hard @@ -7236,7 +7321,7 @@ __metadata: eslint: ">=7.0.0" bin: eslint-config-prettier: bin/cli.js - checksum: 411e3b3b1c7aa04e3e0f20d561271b3b909014956c4dba51c878bf1a23dbb8c800a3be235c46c4732c70827276e540b6eed4636d9b09b444fd0a8e07f0fcd830 + checksum: 10c0/6d332694b36bc9ac6fdb18d3ca2f6ac42afa2ad61f0493e89226950a7091e38981b66bac2b47ba39d15b73fff2cd32c78b850a9cf9eed9ca9a96bfb2f3a2f10d languageName: node linkType: hard @@ -7245,7 +7330,7 @@ __metadata: resolution: "eslint-import-resolver-alias@npm:1.1.2" peerDependencies: eslint-plugin-import: ">=1.4.0" - checksum: 3fbb9aeda98335060bb438ed8446a060d282f80a365838a82edb1f8743b1d54c89303009c7717e3c915d5d722e57148082c5ada4455e811acdc8ed3a65059fa1 + checksum: 10c0/71f156e131242db509fe1cfdb410cca665cc9c6e4201e20609689016414e3c6c0b9df27a74b83367694b8ccc5f41687abde26b6cd2c96f961ba16152aca40e43 languageName: node linkType: hard @@ -7256,7 +7341,7 @@ __metadata: debug: "npm:^3.2.7" is-core-module: "npm:^2.13.0" resolve: "npm:^1.22.4" - checksum: d52e08e1d96cf630957272e4f2644dcfb531e49dcfd1edd2e07e43369eb2ec7a7d4423d417beee613201206ff2efa4eb9a582b5825ee28802fc7c71fcd53ca83 + checksum: 10c0/0ea8a24a72328a51fd95aa8f660dcca74c1429806737cf10261ab90cfcaaf62fd1eff664b76a44270868e0a932711a81b250053942595bcd00a93b1c1575dd61 languageName: node linkType: hard @@ -7268,7 +7353,7 @@ __metadata: peerDependenciesMeta: eslint: optional: true - checksum: 
3e7892c0a984c963632da56b30ccf8254c29b535467138f91086c2ecdb2ebd10e2be61b54e553f30e5abf1d14d47a7baa0dac890e3a658fd3cd07dca63afbe6d + checksum: 10c0/1aeeb97bf4b688d28de136ee57c824480c37691b40fa825c711a4caf85954e94b99c06ac639d7f1f6c1d69223bd21bcb991155b3e589488e958d5b83dfd0f882 languageName: node linkType: hard @@ -7295,7 +7380,7 @@ __metadata: tsconfig-paths: "npm:^3.15.0" peerDependencies: eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 - checksum: 5865f05c38552145423c535326ec9a7113ab2305c7614c8b896ff905cfabc859c8805cac21e979c9f6f742afa333e6f62f812eabf891a7e8f5f0b853a32593c1 + checksum: 10c0/5f35dfbf4e8e67f741f396987de9504ad125c49f4144508a93282b4ea0127e052bde65ab6def1f31b6ace6d5d430be698333f75bdd7dca3bc14226c92a083196 languageName: node linkType: hard @@ -7321,7 +7406,7 @@ __metadata: object.fromentries: "npm:^2.0.7" peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - checksum: 7a8e4498531a43d988ce2f12502a3f5ce96eacfec13f956cf927f24bb041b724fb7fc0f0306ea19d143bfc79e138bf25e25acca0822847206ac6bf5ce095e846 + checksum: 10c0/199b883e526e6f9d7c54cb3f094abc54f11a1ec816db5fb6cae3b938eb0e503acc10ccba91ca7451633a9d0b9abc0ea03601844a8aba5fe88c5e8897c9ac8f49 languageName: node linkType: hard @@ -7341,7 +7426,7 @@ __metadata: optional: true eslint-config-prettier: optional: true - checksum: 4f26a30444adc61ed692cdb5a9f7e8d9f5794f0917151051e66755ce032a08c3cc72c8b5d56101412e90f6d77035bd8194ea8731e9c16aacdd5ae345a8dae188 + checksum: 10c0/f45d5fc1fcfec6b0cf038a7a65ddd10a25df4fe3f9e1f6b7f0d5100e66f046a26a2492e69ee765dddf461b93c114cf2e1eb18d4970aafa6f385448985c136e09 languageName: node linkType: hard @@ -7350,7 +7435,7 @@ __metadata: resolution: "eslint-plugin-react-hooks@npm:4.6.0" peerDependencies: eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 - checksum: 3c63134e056a6d98d66e2c475c81f904169db817e89316d14e36269919e31f4876a2588aa0e466ec8ef160465169c627fe823bfdaae7e213946584e4a165a3ac + checksum: 
10c0/58c7e10ea5792c33346fcf5cb4024e14837035ce412ff99c2dcb7c4f903dc9b17939078f80bfef826301ce326582c396c00e8e0ac9d10ac2cde2b42d33763c65 languageName: node linkType: hard @@ -7359,7 +7444,7 @@ __metadata: resolution: "eslint-plugin-react-refresh@npm:0.4.5" peerDependencies: eslint: ">=7" - checksum: f1526f55829f7eb4d9031fa082cb967f0bc578e8e2a3dfb9e5a47fc31cb0785bfa58ae717157f57241f7086a8790a88a6ec82743eaa5ca392a6b0fdb379169f8 + checksum: 10c0/ea696811c6264d2efee10efe07f80aaae75ded66c941d8d5ce65e15e6c4bb8ad50ac225310ed04f35ed68d2d57937ba4c6f06d9306e78931d583648abf496a41 languageName: node linkType: hard @@ -7385,7 +7470,7 @@ __metadata: string.prototype.matchall: "npm:^4.0.8" peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 - checksum: cb8c5dd5859cace330e24b7d74b9c652c0d93ef1d87957261fe1ac2975c27c918d0d5dc607f25aba4972ce74d04456f4f93883a16ac10cd598680d047fc3495d + checksum: 10c0/f9b247861024bafc396c4bd3c9ac946604b3b23077251c98f23602aa22027a0c33a69157fd49564e4ff7f17b3678e5dc366a46c7ec42a09454d7cbce786d5001 languageName: node linkType: hard @@ -7398,7 +7483,7 @@ __metadata: lodash: "npm:^4.17.15" peerDependencies: eslint: ">=0.8.0" - checksum: 073102a28b25f81f9ea0f4334dd0bf9fe23d3585039bae0706b7b1d7848601c3cf8d003f56932d7948745ba5290e748f9f62f93b4fca2fe0cfdb9508dab3e835 + checksum: 10c0/848ff1a4007cf4ad4c566373b1c80b260f65d41cbaa01c787c82c2690b7242e60f4777518015f000b5a88bd087280d3b70efd2265d67f247359937adc33af727 languageName: node linkType: hard @@ -7412,7 +7497,7 @@ __metadata: ts-dedent: "npm:^2.2.0" peerDependencies: eslint: ">=6" - checksum: a66e6737298af9bb830e3b14cdbd204e589a38adb810f02d843849936ef9175a80a49c8b8fa9263f8c2b9a8f36fdd3a2d429382d8051568c58d6272c65c2f5d3 + checksum: 10c0/c76f6decdd4c826cd6a8bb613085e0cde804f4648093a0464a39867cc0ba4e1d34be15ff91eed827730da5efbbf55ae5e71af648bb0b461946d5e41384669ab8 languageName: node linkType: hard @@ -7422,7 +7507,7 @@ __metadata: dependencies: esrecurse: "npm:^4.3.0" estraverse: "npm:^4.1.1" - checksum: 
c541ef384c92eb5c999b7d3443d80195fcafb3da335500946f6db76539b87d5826c8f2e1d23bf6afc3154ba8cd7c8e566f8dc00f1eea25fdf3afc8fb9c87b238 + checksum: 10c0/d30ef9dc1c1cbdece34db1539a4933fe3f9b14e1ffb27ecc85987902ee663ad7c9473bbd49a9a03195a373741e62e2f807c4938992e019b511993d163450e70a languageName: node linkType: hard @@ -7432,14 +7517,14 @@ __metadata: dependencies: esrecurse: "npm:^4.3.0" estraverse: "npm:^5.2.0" - checksum: 5c660fb905d5883ad018a6fea2b49f3cb5b1cbf2cd4bd08e98646e9864f9bc2c74c0839bed2d292e90a4a328833accc197c8f0baed89cbe8d605d6f918465491 + checksum: 10c0/613c267aea34b5a6d6c00514e8545ef1f1433108097e857225fed40d397dd6b1809dffd11c2fde23b37ca53d7bf935fe04d2a18e6fc932b31837b6ad67e1c116 languageName: node linkType: hard "eslint-visitor-keys@npm:^3.3.0, eslint-visitor-keys@npm:^3.4.1, eslint-visitor-keys@npm:^3.4.3": version: 3.4.3 resolution: "eslint-visitor-keys@npm:3.4.3" - checksum: 3f357c554a9ea794b094a09bd4187e5eacd1bc0d0653c3adeb87962c548e6a1ab8f982b86963ae1337f5d976004146536dcee5d0e2806665b193fbfbf1a9231b + checksum: 10c0/92708e882c0a5ffd88c23c0b404ac1628cf20104a108c745f240a13c332a11aac54f49a22d5762efbffc18ecbc9a580d1b7ad034bf5f3cc3307e5cbff2ec9820 languageName: node linkType: hard @@ -7487,7 +7572,7 @@ __metadata: text-table: "npm:^0.2.0" bin: eslint: bin/eslint.js - checksum: 00496e218b23747a7a9817bf58b522276d0dc1f2e546dceb4eea49f9871574088f72f1f069a6b560ef537efa3a75261b8ef70e51ef19033da1cc4c86a755ef15 + checksum: 10c0/00bb96fd2471039a312435a6776fe1fd557c056755eaa2b96093ef3a8508c92c8775d5f754768be6b1dddd09fdd3379ddb231eeb9b6c579ee17ea7d68000a529 languageName: node linkType: hard @@ -7498,7 +7583,7 @@ __metadata: acorn: "npm:^8.9.0" acorn-jsx: "npm:^5.3.2" eslint-visitor-keys: "npm:^3.4.1" - checksum: 255ab260f0d711a54096bdeda93adff0eadf02a6f9b92f02b323e83a2b7fc258797919437ad331efec3930475feb0142c5ecaaf3cdab4befebd336d47d3f3134 + checksum: 
10c0/1a2e9b4699b715347f62330bcc76aee224390c28bb02b31a3752e9d07549c473f5f986720483c6469cf3cfb3c9d05df612ffc69eb1ee94b54b739e67de9bb460 languageName: node linkType: hard @@ -7508,7 +7593,7 @@ __metadata: bin: esparse: ./bin/esparse.js esvalidate: ./bin/esvalidate.js - checksum: f1d3c622ad992421362294f7acf866aa9409fbad4eb2e8fa230bd33944ce371d32279667b242d8b8907ec2b6ad7353a717f3c0e60e748873a34a7905174bc0eb + checksum: 10c0/ad4bab9ead0808cf56501750fd9d3fb276f6b105f987707d059005d57e182d18a7c9ec7f3a01794ebddcca676773e42ca48a32d67a250c9d35e009ca613caba3 languageName: node linkType: hard @@ -7517,7 +7602,7 @@ __metadata: resolution: "esquery@npm:1.5.0" dependencies: estraverse: "npm:^5.1.0" - checksum: e65fcdfc1e0ff5effbf50fb4f31ea20143ae5df92bb2e4953653d8d40aa4bc148e0d06117a592ce4ea53eeab1dafdfded7ea7e22a5be87e82d73757329a1b01d + checksum: 10c0/a084bd049d954cc88ac69df30534043fb2aee5555b56246493f42f27d1e168f00d9e5d4192e46f10290d312dc30dc7d58994d61a609c579c1219d636996f9213 languageName: node linkType: hard @@ -7526,42 +7611,49 @@ __metadata: resolution: "esrecurse@npm:4.3.0" dependencies: estraverse: "npm:^5.2.0" - checksum: 44ffcd89e714ea6b30143e7f119b104fc4d75e77ee913f34d59076b40ef2d21967f84e019f84e1fd0465b42cdbf725db449f232b5e47f29df29ed76194db8e16 + checksum: 10c0/81a37116d1408ded88ada45b9fb16dbd26fba3aadc369ce50fcaf82a0bac12772ebd7b24cd7b91fc66786bf2c1ac7b5f196bc990a473efff972f5cb338877cf5 languageName: node linkType: hard "estraverse@npm:^4.1.1": version: 4.3.0 resolution: "estraverse@npm:4.3.0" - checksum: 3f67ad02b6dbfaddd9ea459cf2b6ef4ecff9a6082a7af9d22e445b9abc082ad9ca47e1825557b293fcdae477f4714e561123e30bb6a5b2f184fb2bad4a9497eb + checksum: 10c0/9cb46463ef8a8a4905d3708a652d60122a0c20bb58dec7e0e12ab0e7235123d74214fc0141d743c381813e1b992767e2708194f6f6e0f9fd00c1b4e0887b8b6d languageName: node linkType: hard "estraverse@npm:^5.1.0, estraverse@npm:^5.2.0, estraverse@npm:^5.3.0": version: 5.3.0 resolution: "estraverse@npm:5.3.0" - checksum: 
37cbe6e9a68014d34dbdc039f90d0baf72436809d02edffcc06ba3c2a12eb298048f877511353b130153e532aac8d68ba78430c0dd2f44806ebc7c014b01585e + checksum: 10c0/1ff9447b96263dec95d6d67431c5e0771eb9776427421260a3e2f0fdd5d6bd4f8e37a7338f5ad2880c9f143450c9b1e4fc2069060724570a49cf9cf0312bd107 languageName: node linkType: hard "estree-walker@npm:^2.0.1, estree-walker@npm:^2.0.2": version: 2.0.2 resolution: "estree-walker@npm:2.0.2" - checksum: b02109c5d46bc2ed47de4990eef770f7457b1159a229f0999a09224d2b85ffeed2d7679cffcff90aeb4448e94b0168feb5265b209cdec29aad50a3d6e93d21e2 + checksum: 10c0/53a6c54e2019b8c914dc395890153ffdc2322781acf4bd7d1a32d7aedc1710807bdcd866ac133903d5629ec601fbb50abe8c2e5553c7f5a0afdd9b6af6c945af languageName: node linkType: hard "esutils@npm:^2.0.2": version: 2.0.3 resolution: "esutils@npm:2.0.3" - checksum: b23acd24791db11d8f65be5ea58fd9a6ce2df5120ae2da65c16cfc5331ff59d5ac4ef50af66cd4bde238881503ec839928a0135b99a036a9cdfa22d17fd56cdb + checksum: 10c0/9a2fe69a41bfdade834ba7c42de4723c97ec776e40656919c62cbd13607c45e127a003f05f724a1ea55e5029a4cf2de444b13009f2af71271e42d93a637137c7 languageName: node linkType: hard "etag@npm:~1.8.1": version: 1.8.1 resolution: "etag@npm:1.8.1" - checksum: 571aeb3dbe0f2bbd4e4fadbdb44f325fc75335cd5f6f6b6a091e6a06a9f25ed5392f0863c5442acb0646787446e816f13cbfc6edce5b07658541dff573cab1ff + checksum: 10c0/12be11ef62fb9817314d790089a0a49fae4e1b50594135dcb8076312b7d7e470884b5100d249b28c18581b7fd52f8b485689ffae22a11ed9ec17377a33a08f84 + languageName: node + linkType: hard + +"eventemitter3@npm:^5.0.1": + version: 5.0.1 + resolution: "eventemitter3@npm:5.0.1" + checksum: 10c0/4ba5c00c506e6c786b4d6262cfbce90ddc14c10d4667e5c83ae993c9de88aa856033994dd2b35b83e8dc1170e224e66a319fa80adc4c32adcd2379bbc75da814 languageName: node linkType: hard @@ -7578,11 +7670,11 @@ __metadata: onetime: "npm:^5.1.2" signal-exit: "npm:^3.0.3" strip-final-newline: "npm:^2.0.0" - checksum: 
8ada91f2d70f7dff702c861c2c64f21dfdc1525628f3c0454fd6f02fce65f7b958616cbd2b99ca7fa4d474e461a3d363824e91b3eb881705231abbf387470597 + checksum: 10c0/c8e615235e8de4c5addf2fa4c3da3e3aa59ce975a3e83533b4f6a71750fb816a2e79610dc5f1799b6e28976c9ae86747a36a606655bf8cb414a74d8d507b304f languageName: node linkType: hard -"execa@npm:^8.0.1": +"execa@npm:^8.0.1, execa@npm:~8.0.1": version: 8.0.1 resolution: "execa@npm:8.0.1" dependencies: @@ -7595,14 +7687,14 @@ __metadata: onetime: "npm:^6.0.0" signal-exit: "npm:^4.1.0" strip-final-newline: "npm:^3.0.0" - checksum: d2ab5fe1e2bb92b9788864d0713f1fce9a07c4594e272c0c97bc18c90569897ab262e4ea58d27a694d288227a2e24f16f5e2575b44224ad9983b799dc7f1098d + checksum: 10c0/2c52d8775f5bf103ce8eec9c7ab3059909ba350a5164744e9947ed14a53f51687c040a250bda833f906d1283aa8803975b84e6c8f7a7c42f99dc8ef80250d1af languageName: node linkType: hard "exit@npm:^0.1.2": version: 0.1.2 resolution: "exit@npm:0.1.2" - checksum: 387555050c5b3c10e7a9e8df5f43194e95d7737c74532c409910e585d5554eaff34960c166643f5e23d042196529daad059c292dcf1fb61b8ca878d3677f4b87 + checksum: 10c0/71d2ad9b36bc25bb8b104b17e830b40a08989be7f7d100b13269aaae7c3784c3e6e1e88a797e9e87523993a25ba27c8958959a554535370672cfb4d824af8989 languageName: node linkType: hard @@ -7615,14 +7707,14 @@ __metadata: jest-matcher-utils: "npm:^29.7.0" jest-message-util: "npm:^29.7.0" jest-util: "npm:^29.7.0" - checksum: 63f97bc51f56a491950fb525f9ad94f1916e8a014947f8d8445d3847a665b5471b768522d659f5e865db20b6c2033d2ac10f35fcbd881a4d26407a4f6f18451a + checksum: 10c0/2eddeace66e68b8d8ee5f7be57f3014b19770caaf6815c7a08d131821da527fb8c8cb7b3dcd7c883d2d3d8d184206a4268984618032d1e4b16dc8d6596475d41 languageName: node linkType: hard "exponential-backoff@npm:^3.1.1": version: 3.1.1 resolution: "exponential-backoff@npm:3.1.1" - checksum: 2d9bbb6473de7051f96790d5f9a678f32e60ed0aa70741dc7fdc96fec8d631124ec3374ac144387604f05afff9500f31a1d45bd9eee4cdc2e4f9ad2d9b9d5dbd + checksum: 
10c0/160456d2d647e6019640bd07111634d8c353038d9fa40176afb7cd49b0548bdae83b56d05e907c2cce2300b81cae35d800ef92fefb9d0208e190fa3b7d6bb579 languageName: node linkType: hard @@ -7661,14 +7753,14 @@ __metadata: type-is: "npm:~1.6.18" utils-merge: "npm:1.0.1" vary: "npm:~1.1.2" - checksum: 869ae89ed6ff4bed7b373079dc58e5dddcf2915a2669b36037ff78c99d675ae930e5fe052b35c24f56557d28a023bb1cbe3e2f2fb87eaab96a1cedd7e597809d + checksum: 10c0/75af556306b9241bc1d7bdd40c9744b516c38ce50ae3210658efcbf96e3aed4ab83b3432f06215eae5610c123bc4136957dc06e50dfc50b7d4d775af56c4c59c languageName: node linkType: hard "extend@npm:^3.0.0": version: 3.0.2 resolution: "extend@npm:3.0.2" - checksum: 59e89e2dc798ec0f54b36d82f32a27d5f6472c53974f61ca098db5d4648430b725387b53449a34df38fd0392045434426b012f302b3cc049a6500ccf82877e4e + checksum: 10c0/73bf6e27406e80aa3e85b0d1c4fd987261e628064e170ca781125c0b635a3dabad5e05adbf07595ea0cf1e6c5396cacb214af933da7cbaf24fe75ff14818e8f9 languageName: node linkType: hard @@ -7682,21 +7774,21 @@ __metadata: yauzl: "npm:^2.10.0" bin: extract-zip: cli.js - checksum: a9a5e2b118cc1d3b780d296f056308a8fda580bb18a26e12d6137321e5d3ef1d09355195ff187e9c7039aab42a253ac1e3996c66d031c44abca5abde6fd51393 + checksum: 10c0/333f1349ee678d47268315f264dbfcd7003747d25640441e186e87c66efd7129f171f1bcfe8ff1151a24da19d5f8602daff002ee24145dc65516bc9a8e40ee08 languageName: node linkType: hard "fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3": version: 3.1.3 resolution: "fast-deep-equal@npm:3.1.3" - checksum: e21a9d8d84f53493b6aa15efc9cfd53dd5b714a1f23f67fb5dc8f574af80df889b3bce25dc081887c6d25457cce704e636395333abad896ccdec03abaf1f3f9d + checksum: 10c0/40dedc862eb8992c54579c66d914635afbec43350afbbe991235fdcb4e3a8d5af1b23ae7e79bef7d4882d0ecee06c3197488026998fb19f72dc95acff1d1b1d0 languageName: node linkType: hard "fast-diff@npm:^1.1.2": version: 1.3.0 resolution: "fast-diff@npm:1.3.0" - checksum: 
9e57415bc69cd6efcc720b3b8fe9fdaf42dcfc06f86f0f45378b1fa512598a8aac48aa3928c8751d58e2f01bb4ba4f07e4f3d9bc0d57586d45f1bd1e872c6cde + checksum: 10c0/5c19af237edb5d5effda008c891a18a585f74bf12953be57923f17a3a4d0979565fc64dbc73b9e20926b9d895f5b690c618cbb969af0cf022e3222471220ad29 languageName: node linkType: hard @@ -7709,21 +7801,21 @@ __metadata: glob-parent: "npm:^5.1.2" merge2: "npm:^1.3.0" micromatch: "npm:^4.0.4" - checksum: 222512e9315a0efca1276af9adb2127f02105d7288fa746145bf45e2716383fb79eb983c89601a72a399a56b7c18d38ce70457c5466218c5f13fad957cee16df + checksum: 10c0/42baad7b9cd40b63e42039132bde27ca2cb3a4950d0a0f9abe4639ea1aa9d3e3b40f98b1fe31cbc0cc17b664c9ea7447d911a152fa34ec5b72977b125a6fc845 languageName: node linkType: hard "fast-json-stable-stringify@npm:^2.0.0, fast-json-stable-stringify@npm:^2.1.0": version: 2.1.0 resolution: "fast-json-stable-stringify@npm:2.1.0" - checksum: 2c20055c1fa43c922428f16ca8bb29f2807de63e5c851f665f7ac9790176c01c3b40335257736b299764a8d383388dabc73c8083b8e1bc3d99f0a941444ec60e + checksum: 10c0/7f081eb0b8a64e0057b3bb03f974b3ef00135fbf36c1c710895cd9300f13c94ba809bb3a81cf4e1b03f6e5285610a61abbd7602d0652de423144dfee5a389c9b languageName: node linkType: hard "fast-levenshtein@npm:^2.0.6": version: 2.0.6 resolution: "fast-levenshtein@npm:2.0.6" - checksum: eb7e220ecf2bab5159d157350b81d01f75726a4382f5a9266f42b9150c4523b9795f7f5d9fbbbeaeac09a441b2369f05ee02db48ea938584205530fe5693cfe1 + checksum: 10c0/111972b37338bcb88f7d9e2c5907862c280ebf4234433b95bc611e518d192ccb2d38119c4ac86e26b668d75f7f3894f4ff5c4982899afced7ca78633b08287c4 languageName: node linkType: hard @@ -7732,7 +7824,7 @@ __metadata: resolution: "fastq@npm:1.17.1" dependencies: reusify: "npm:^1.0.4" - checksum: a443180068b527dd7b3a63dc7f2a47ceca2f3e97b9c00a1efe5538757e6cc4056a3526df94308075d7727561baf09ebaa5b67da8dcbddb913a021c5ae69d1f69 + checksum: 
10c0/1095f16cea45fb3beff558bb3afa74ca7a9250f5a670b65db7ed585f92b4b48381445cd328b3d87323da81e43232b5d5978a8201bde84e0cd514310f1ea6da34 languageName: node linkType: hard @@ -7741,7 +7833,7 @@ __metadata: resolution: "fault@npm:1.0.4" dependencies: format: "npm:^0.2.0" - checksum: 5ac610d8b09424e0f2fa8cf913064372f2ee7140a203a79957f73ed557c0e79b1a3d096064d7f40bde8132a69204c1fe25ec23634c05c6da2da2039cff26c4e7 + checksum: 10c0/c86c11500c1b676787296f31ade8473adcc6784f118f07c1a9429730b6288d0412f96e069ce010aa57e4f65a9cccb5abee8868bbe3c5f10de63b20482c9baebd languageName: node linkType: hard @@ -7750,7 +7842,7 @@ __metadata: resolution: "fb-watchman@npm:2.0.2" dependencies: bser: "npm:2.1.1" - checksum: 4f95d336fb805786759e383fd7fff342ceb7680f53efcc0ef82f502eb479ce35b98e8b207b6dfdfeea0eba845862107dc73813775fc6b56b3098c6e90a2dad77 + checksum: 10c0/feae89ac148adb8f6ae8ccd87632e62b13563e6fb114cacb5265c51f585b17e2e268084519fb2edd133872f1d47a18e6bfd7e5e08625c0d41b93149694187581 languageName: node linkType: hard @@ -7759,21 +7851,21 @@ __metadata: resolution: "fd-slicer@npm:1.1.0" dependencies: pend: "npm:~1.2.0" - checksum: db3e34fa483b5873b73f248e818f8a8b59a6427fd8b1436cd439c195fdf11e8659419404826059a642b57d18075c856d06d6a50a1413b714f12f833a9341ead3 + checksum: 10c0/304dd70270298e3ffe3bcc05e6f7ade2511acc278bc52d025f8918b48b6aa3b77f10361bddfadfe2a28163f7af7adbdce96f4d22c31b2f648ba2901f0c5fc20e languageName: node linkType: hard "fecha@npm:^4.2.0": version: 4.2.3 resolution: "fecha@npm:4.2.3" - checksum: 534ce630c8f63c116292145607fc18c0f06bfa2fd74094357bf65daacc5d3f4f2b285bf8eb112c3bbf98c5caa6d386cced797f44b9b1b33da0c0a81020444826 + checksum: 10c0/0e895965959cf6a22bb7b00f0bf546f2783836310f510ddf63f463e1518d4c96dec61ab33fdfd8e79a71b4856a7c865478ce2ee8498d560fe125947703c9b1cf languageName: node linkType: hard "fetch-retry@npm:^5.0.2": version: 5.0.6 resolution: "fetch-retry@npm:5.0.6" - checksum: 
9d64b37f9d179fecf486725ada210d169375803b731304a9500754e094a2a6aa81630d946adbb313d7f9d54457ad0d17c3ed5c115034961a719e8a65faa8b77c + checksum: 10c0/349f50db631039630e915f70c763469cb696f3ac92ca6f63823109334a2bc62f63670b8c5a5c7e0195c39df517e60ef385cc5264f4c4904d0c6707d371fa8999 languageName: node linkType: hard @@ -7782,7 +7874,7 @@ __metadata: resolution: "file-entry-cache@npm:6.0.1" dependencies: flat-cache: "npm:^3.0.4" - checksum: 099bb9d4ab332cb93c48b14807a6918a1da87c45dce91d4b61fd40e6505d56d0697da060cb901c729c90487067d93c9243f5da3dc9c41f0358483bfdebca736b + checksum: 10c0/58473e8a82794d01b38e5e435f6feaf648e3f36fdb3a56e98f417f4efae71ad1c0d4ebd8a9a7c50c3ad085820a93fc7494ad721e0e4ebc1da3573f4e1c3c7cdd languageName: node linkType: hard @@ -7792,7 +7884,7 @@ __metadata: dependencies: fs-extra: "npm:11.1.1" ramda: "npm:0.29.0" - checksum: 8f0530aaa8bed115ef1b00f69accde8d1311d0eaffc6e37bb0b5057b8be79e6e960823025ea3c980a58147eed0ba690b9906c2229e132f5d96158e9b635a052c + checksum: 10c0/43de19f0db32e6546bb7abeecb1d6ea83c1eca23b38905c9415a29f6219cc9d6d87b0c1a6aca92c46a0f1bc276241a339f2f68b8aa0ca5c2eb64b6e1e3e4da01 languageName: node linkType: hard @@ -7801,7 +7893,7 @@ __metadata: resolution: "filelist@npm:1.0.4" dependencies: minimatch: "npm:^5.0.1" - checksum: 4b436fa944b1508b95cffdfc8176ae6947b92825483639ef1b9a89b27d82f3f8aa22b21eed471993f92709b431670d4e015b39c087d435a61e1bb04564cf51de + checksum: 10c0/426b1de3944a3d153b053f1c0ebfd02dccd0308a4f9e832ad220707a6d1f1b3c9784d6cadf6b2f68f09a57565f63ebc7bcdc913ccf8012d834f472c46e596f41 languageName: node linkType: hard @@ -7810,7 +7902,16 @@ __metadata: resolution: "fill-range@npm:7.0.1" dependencies: to-regex-range: "npm:^5.0.1" - checksum: e260f7592fd196b4421504d3597cc76f4a1ca7a9488260d533b611fc3cefd61e9a9be1417cb82d3b01ad9f9c0ff2dbf258e1026d2445e26b0cf5148ff4250429 + checksum: 10c0/7cdad7d426ffbaadf45aeb5d15ec675bbd77f7597ad5399e3d2766987ed20bda24d5fac64b3ee79d93276f5865608bb22344a26b9b1ae6c4d00bd94bf611623f + languageName: 
node + linkType: hard + +"fill-range@npm:^7.1.1": + version: 7.1.1 + resolution: "fill-range@npm:7.1.1" + dependencies: + to-regex-range: "npm:^5.0.1" + checksum: 10c0/b75b691bbe065472f38824f694c2f7449d7f5004aa950426a2c28f0306c60db9b880c0b0e4ed819997ffb882d1da02cfcfc819bddc94d71627f5269682edf018 languageName: node linkType: hard @@ -7825,7 +7926,7 @@ __metadata: parseurl: "npm:~1.3.3" statuses: "npm:2.0.1" unpipe: "npm:~1.0.0" - checksum: 635718cb203c6d18e6b48dfbb6c54ccb08ea470e4f474ddcef38c47edcf3227feec316f886dd701235997d8af35240cae49856721ce18f539ad038665ebbf163 + checksum: 10c0/64b7e5ff2ad1fcb14931cd012651631b721ce657da24aedb5650ddde9378bf8e95daa451da43398123f5de161a81e79ff5affe4f9f2a6d2df4a813d6d3e254b7 languageName: node linkType: hard @@ -7836,7 +7937,7 @@ __metadata: commondir: "npm:^1.0.1" make-dir: "npm:^2.0.0" pkg-dir: "npm:^3.0.0" - checksum: 60ad475a6da9f257df4e81900f78986ab367d4f65d33cf802c5b91e969c28a8762f098693d7a571b6e4dd4c15166c2da32ae2d18b6766a18e2071079448fdce4 + checksum: 10c0/556117fd0af14eb88fb69250f4bba9e905e7c355c6136dff0e161b9cbd1f5285f761b778565a278da73a130f42eccc723d7ad4c002ae547ed1d698d39779dabb languageName: node linkType: hard @@ -7847,7 +7948,7 @@ __metadata: commondir: "npm:^1.0.1" make-dir: "npm:^3.0.2" pkg-dir: "npm:^4.1.0" - checksum: 3907c2e0b15132704ed67083686cd3e68ab7d9ecc22e50ae9da20678245d488b01fa22c0e34c0544dc6edc4354c766f016c8c186a787be7c17f7cde8c5281e85 + checksum: 10c0/92747cda42bff47a0266b06014610981cfbb71f55d60f2c8216bc3108c83d9745507fb0b14ecf6ab71112bed29cd6fb1a137ee7436179ea36e11287e3159e587 languageName: node linkType: hard @@ -7856,7 +7957,7 @@ __metadata: resolution: "find-up@npm:3.0.0" dependencies: locate-path: "npm:^3.0.0" - checksum: 38eba3fe7a66e4bc7f0f5a1366dc25508b7cfc349f852640e3678d26ad9a6d7e2c43eff0a472287de4a9753ef58f066a0ea892a256fa3636ad51b3fe1e17fae9 + checksum: 10c0/2c2e7d0a26db858e2f624f39038c74739e38306dee42b45f404f770db357947be9d0d587f1cac72d20c114deb38aa57316e879eb0a78b17b46da7dab0a3bd6e3 
languageName: node linkType: hard @@ -7866,7 +7967,7 @@ __metadata: dependencies: locate-path: "npm:^5.0.0" path-exists: "npm:^4.0.0" - checksum: 4c172680e8f8c1f78839486e14a43ef82e9decd0e74145f40707cc42e7420506d5ec92d9a11c22bd2c48fb0c384ea05dd30e10dd152fefeec6f2f75282a8b844 + checksum: 10c0/0406ee89ebeefa2d507feb07ec366bebd8a6167ae74aa4e34fb4c4abd06cf782a3ce26ae4194d70706f72182841733f00551c209fe575cb00bd92104056e78c1 languageName: node linkType: hard @@ -7876,7 +7977,7 @@ __metadata: dependencies: locate-path: "npm:^6.0.0" path-exists: "npm:^4.0.0" - checksum: 07955e357348f34660bde7920783204ff5a26ac2cafcaa28bace494027158a97b9f56faaf2d89a6106211a8174db650dd9f503f9c0d526b1202d5554a00b9095 + checksum: 10c0/062c5a83a9c02f53cdd6d175a37ecf8f87ea5bbff1fdfb828f04bfa021441bc7583e8ebc0872a4c1baab96221fb8a8a275a19809fb93fbc40bd69ec35634069a languageName: node linkType: hard @@ -7887,28 +7988,28 @@ __metadata: flatted: "npm:^3.2.9" keyv: "npm:^4.5.3" rimraf: "npm:^3.0.2" - checksum: 02381c6ece5e9fa5b826c9bbea481d7fd77645d96e4b0b1395238124d581d10e56f17f723d897b6d133970f7a57f0fab9148cbbb67237a0a0ffe794ba60c0c70 + checksum: 10c0/b76f611bd5f5d68f7ae632e3ae503e678d205cf97a17c6ab5b12f6ca61188b5f1f7464503efae6dc18683ed8f0b41460beb48ac4b9ac63fe6201296a91ba2f75 languageName: node linkType: hard "flatted@npm:^3.2.9": version: 3.3.1 resolution: "flatted@npm:3.3.1" - checksum: 7b8376061d5be6e0d3658bbab8bde587647f68797cf6bfeae9dea0e5137d9f27547ab92aaff3512dd9d1299086a6d61be98e9d48a56d17531b634f77faadbc49 + checksum: 10c0/324166b125ee07d4ca9bcf3a5f98d915d5db4f39d711fba640a3178b959919aae1f7cfd8aabcfef5826ed8aa8a2aa14cc85b2d7d18ff638ddf4ae3df39573eaf languageName: node linkType: hard "flow-parser@npm:0.*": version: 0.229.2 resolution: "flow-parser@npm:0.229.2" - checksum: 6308b26f8dbeed073ef2d1890d99cef59669a19e87afd6b071114867795dce71901f81207b9d89ed649f6cd826452f8f826702a49c7b2dc4bc94264d6cdd3bd3 + checksum: 
10c0/6f2b0b393fb02ae6b839e7e13fd31e91872941843a805eb529b3e969cf0a8a6a0546fc2ab5c7d557eead8be3e2a3fe41c07276f826ba1f84702fcbefa7723fbc languageName: node linkType: hard "fn.name@npm:1.x.x": version: 1.1.0 resolution: "fn.name@npm:1.1.0" - checksum: 000198af190ae02f0138ac5fa4310da733224c628e0230c81e3fff7c4e094af7e0e8bb9f4357cabd21db601759d89f3445da744afbae20623cfa41edf3888397 + checksum: 10c0/8ad62aa2d4f0b2a76d09dba36cfec61c540c13a0fd72e5d94164e430f987a7ce6a743112bbeb14877c810ef500d1f73d7f56e76d029d2e3413f20d79e3460a9a languageName: node linkType: hard @@ -7918,7 +8019,7 @@ __metadata: peerDependenciesMeta: debug: optional: true - checksum: d467f13c1c6aa734599b8b369cd7a625b20081af358f6204ff515f6f4116eb440de9c4e0c49f10798eeb0df26c95dd05d5e0d9ddc5786ab1a8a8abefe92929b4 + checksum: 10c0/418d71688ceaf109dfd6f85f747a0c75de30afe43a294caa211def77f02ef19865b547dfb73fde82b751e1cc507c06c754120b848fe5a7400b0a669766df7615 languageName: node linkType: hard @@ -7927,7 +8028,7 @@ __metadata: resolution: "for-each@npm:0.3.3" dependencies: is-callable: "npm:^1.1.3" - checksum: fdac0cde1be35610bd635ae958422e8ce0cc1313e8d32ea6d34cfda7b60850940c1fd07c36456ad76bd9c24aef6ff5e03b02beb58c83af5ef6c968a64eada676 + checksum: 10c0/22330d8a2db728dbf003ec9182c2d421fbcd2969b02b4f97ec288721cda63eb28f2c08585ddccd0f77cb2930af8d958005c9e72f47141dc51816127a118f39aa languageName: node linkType: hard @@ -7937,7 +8038,7 @@ __metadata: dependencies: cross-spawn: "npm:^7.0.0" signal-exit: "npm:^4.0.1" - checksum: 087edd44857d258c4f73ad84cb8df980826569656f2550c341b27adf5335354393eec24ea2fabd43a253233fb27cee177ebe46bd0b7ea129c77e87cb1e9936fb + checksum: 10c0/9700a0285628abaeb37007c9a4d92bd49f67210f09067638774338e146c8e9c825c5c877f072b2f75f41dc6a2d0be8664f79ffc03f6576649f54a84fb9b47de0 languageName: node linkType: hard @@ -7948,35 +8049,35 @@ __metadata: asynckit: "npm:^0.4.0" combined-stream: "npm:^1.0.8" mime-types: "npm:^2.1.12" - checksum: 
7264aa760a8cf09482816d8300f1b6e2423de1b02bba612a136857413fdc96d7178298ced106817655facc6b89036c6e12ae31c9eb5bdc16aabf502ae8a5d805 + checksum: 10c0/cb6f3ac49180be03ff07ba3ff125f9eba2ff0b277fb33c7fc47569fc5e616882c5b1c69b9904c4c4187e97dd0419dd03b134174756f296dec62041e6527e2c6e languageName: node linkType: hard "format@npm:^0.2.0": version: 0.2.2 resolution: "format@npm:0.2.2" - checksum: 5f878b8fc1a672c8cbefa4f293bdd977c822862577d70d53456a48b4169ec9b51677c0c995bf62c633b4e5cd673624b7c273f57923b28735a6c0c0a72c382a4a + checksum: 10c0/6032ba747541a43abf3e37b402b2f72ee08ebcb58bf84d816443dd228959837f1cddf1e8775b29fa27ff133f4bd146d041bfca5f9cf27f048edf3d493cf8fee6 languageName: node linkType: hard "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" - checksum: 29ba9fd347117144e97cbb8852baae5e8b2acb7d1b591ef85695ed96f5b933b1804a7fac4a15dd09ca7ac7d0cdc104410e8102aae2dd3faa570a797ba07adb81 + checksum: 10c0/9b67c3fac86acdbc9ae47ba1ddd5f2f81526fa4c8226863ede5600a3f7c7416ef451f6f1e240a3cc32d0fd79fcfe6beb08fd0da454f360032bde70bf80afbb33 languageName: node linkType: hard "fresh@npm:0.5.2": version: 0.5.2 resolution: "fresh@npm:0.5.2" - checksum: 64c88e489b5d08e2f29664eb3c79c705ff9a8eb15d3e597198ef76546d4ade295897a44abb0abd2700e7ef784b2e3cbf1161e4fbf16f59129193fd1030d16da1 + checksum: 10c0/c6d27f3ed86cc5b601404822f31c900dd165ba63fff8152a3ef714e2012e7535027063bc67ded4cb5b3a49fa596495d46cacd9f47d6328459cf570f08b7d9e5a languageName: node linkType: hard "fs-constants@npm:^1.0.0": version: 1.0.0 resolution: "fs-constants@npm:1.0.0" - checksum: 18f5b718371816155849475ac36c7d0b24d39a11d91348cfcb308b4494824413e03572c403c86d3a260e049465518c4f0d5bd00f0371cdfcad6d4f30a85b350d + checksum: 10c0/a0cde99085f0872f4d244e83e03a46aa387b74f5a5af750896c6b05e9077fac00e9932fdf5aef84f2f16634cd473c63037d7a512576da7d5c2b9163d1909f3a8 languageName: node linkType: hard @@ -7987,7 +8088,7 @@ __metadata: graceful-fs: "npm:^4.2.0" jsonfile: "npm:^6.0.1" universalify: "npm:^2.0.0" - checksum: 
c4e9fabf9762a70d1403316b7faa899f3d3303c8afa765b891c2210fdeba368461e04ae1203920b64ef6a7d066a39ab8cef2160b5ce8d1011bb4368688cd9bb7 + checksum: 10c0/a2480243d7dcfa7d723c5f5b24cf4eba02a6ccece208f1524a2fbde1c629492cfb9a59e4b6d04faff6fbdf71db9fdc8ef7f396417a02884195a625f5d8dc9427 languageName: node linkType: hard @@ -7998,7 +8099,7 @@ __metadata: graceful-fs: "npm:^4.2.0" jsonfile: "npm:^6.0.1" universalify: "npm:^2.0.0" - checksum: 0579bf6726a4cd054d4aa308f10b483f52478bb16284f32cf60b4ce0542063d551fca1a08a2af365e35db21a3fa5a06cf2a6ed614004b4368982bc754cb816b3 + checksum: 10c0/d77a9a9efe60532d2e790e938c81a02c1b24904ef7a3efb3990b835514465ba720e99a6ea56fd5e2db53b4695319b644d76d5a0e9988a2beef80aa7b1da63398 languageName: node linkType: hard @@ -8007,7 +8108,7 @@ __metadata: resolution: "fs-minipass@npm:2.1.0" dependencies: minipass: "npm:^3.0.0" - checksum: 03191781e94bc9a54bd376d3146f90fe8e082627c502185dbf7b9b3032f66b0b142c1115f3b2cc5936575fc1b44845ce903dd4c21bec2a8d69f3bd56f9cee9ec + checksum: 10c0/703d16522b8282d7299337539c3ed6edddd1afe82435e4f5b76e34a79cd74e488a8a0e26a636afc2440e1a23b03878e2122e3a2cfe375a5cf63c37d92b86a004 languageName: node linkType: hard @@ -8016,14 +8117,14 @@ __metadata: resolution: "fs-minipass@npm:3.0.3" dependencies: minipass: "npm:^7.0.3" - checksum: af143246cf6884fe26fa281621d45cfe111d34b30535a475bfa38dafe343dadb466c047a924ffc7d6b7b18265df4110224ce3803806dbb07173bf2087b648d7f + checksum: 10c0/63e80da2ff9b621e2cb1596abcb9207f1cf82b968b116ccd7b959e3323144cce7fb141462200971c38bbf2ecca51695069db45265705bed09a7cd93ae5b89f94 languageName: node linkType: hard "fs.realpath@npm:^1.0.0": version: 1.0.0 resolution: "fs.realpath@npm:1.0.0" - checksum: e703107c28e362d8d7b910bbcbfd371e640a3bb45ae157a362b5952c0030c0b6d4981140ec319b347bce7adc025dd7813da1ff908a945ac214d64f5402a51b96 + checksum: 10c0/444cf1291d997165dfd4c0d58b69f0e4782bfd9149fd72faa4fe299e68e0e93d6db941660b37dd29153bf7186672ececa3b50b7e7249477b03fdf850f287c948 languageName: node linkType: hard @@ 
-8032,7 +8133,7 @@ __metadata: resolution: "fsevents@npm:2.3.3" dependencies: node-gyp: "npm:latest" - checksum: 4c1ade961ded57cdbfbb5cac5106ec17bc8bccd62e16343c569a0ceeca83b9dfef87550b4dc5cbb89642da412b20c5071f304c8c464b80415446e8e155a038c0 + checksum: 10c0/a1f0c44595123ed717febbc478aa952e47adfc28e2092be66b8ab1635147254ca6cfe1df792a8997f22716d4cbafc73309899ff7bfac2ac3ad8cf2e4ecc3ec60 conditions: os=darwin languageName: node linkType: hard @@ -8049,7 +8150,7 @@ __metadata: "function-bind@npm:^1.1.2": version: 1.1.2 resolution: "function-bind@npm:1.1.2" - checksum: 185e20d20f10c8d661d59aac0f3b63b31132d492e1b11fcc2a93cb2c47257ebaee7407c38513efd2b35cafdf972d9beb2ea4593c1e0f3bf8f2744836928d7454 + checksum: 10c0/d8680ee1e5fcd4c197e4ac33b2b4dce03c71f4d91717292785703db200f5c21f977c568d28061226f9b5900cbcd2c84463646134fd5337e7925e0942bc3f46d5 languageName: node linkType: hard @@ -8061,28 +8162,35 @@ __metadata: define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" functions-have-names: "npm:^1.2.3" - checksum: 4d40be44d4609942e4e90c4fff77a811fa936f4985d92d2abfcf44f673ba344e2962bf223a33101f79c1a056465f36f09b072b9c289d7660ca554a12491cd5a2 + checksum: 10c0/9eae11294905b62cb16874adb4fc687927cda3162285e0ad9612e6a1d04934005d46907362ea9cdb7428edce05a2f2c3dabc3b2d21e9fd343e9bb278230ad94b languageName: node linkType: hard "functions-have-names@npm:^1.2.3": version: 1.2.3 resolution: "functions-have-names@npm:1.2.3" - checksum: 0ddfd3ed1066a55984aaecebf5419fbd9344a5c38dd120ffb0739fac4496758dcf371297440528b115e4367fc46e3abc86a2cc0ff44612181b175ae967a11a05 + checksum: 10c0/33e77fd29bddc2d9bb78ab3eb854c165909201f88c75faa8272e35899e2d35a8a642a15e7420ef945e1f64a9670d6aa3ec744106b2aa42be68ca5114025954ca languageName: node linkType: hard "gensync@npm:^1.0.0-beta.2": version: 1.0.0-beta.2 resolution: "gensync@npm:1.0.0-beta.2" - checksum: 17d8333460204fbf1f9160d067e1e77f908a5447febb49424b8ab043026049835c9ef3974445c57dbd39161f4d2b04356d7de12b2eecaa27a7a7ea7d871cbedd + checksum: 
10c0/782aba6cba65b1bb5af3b095d96249d20edbe8df32dbf4696fd49be2583faf676173bf4809386588828e4dd76a3354fcbeb577bab1c833ccd9fc4577f26103f8 languageName: node linkType: hard "get-caller-file@npm:^2.0.5": version: 2.0.5 resolution: "get-caller-file@npm:2.0.5" - checksum: b9769a836d2a98c3ee734a88ba712e62703f1df31b94b784762c433c27a386dd6029ff55c2a920c392e33657d80191edbf18c61487e198844844516f843496b9 + checksum: 10c0/c6c7b60271931fa752aeb92f2b47e355eac1af3a2673f47c9589e8f8a41adc74d45551c1bc57b5e66a80609f10ffb72b6f575e4370d61cc3f7f3aaff01757cde + languageName: node + linkType: hard + +"get-east-asian-width@npm:^1.0.0": + version: 1.2.0 + resolution: "get-east-asian-width@npm:1.2.0" + checksum: 10c0/914b1e217cf38436c24b4c60b4c45289e39a45bf9e65ef9fd343c2815a1a02b8a0215aeec8bf9c07c516089004b6e3826332481f40a09529fcadbf6e579f286b languageName: node linkType: hard @@ -8095,49 +8203,49 @@ __metadata: has-proto: "npm:^1.0.1" has-symbols: "npm:^1.0.3" hasown: "npm:^2.0.0" - checksum: 85bbf4b234c3940edf8a41f4ecbd4e25ce78e5e6ad4e24ca2f77037d983b9ef943fd72f00f3ee97a49ec622a506b67db49c36246150377efcda1c9eb03e5f06d + checksum: 10c0/0a9b82c16696ed6da5e39b1267104475c47e3a9bdbe8b509dfe1710946e38a87be70d759f4bb3cda042d76a41ef47fe769660f3b7c0d1f68750299344ffb15b7 languageName: node linkType: hard "get-nonce@npm:^1.0.0": version: 1.0.1 resolution: "get-nonce@npm:1.0.1" - checksum: ad5104871d114a694ecc506a2d406e2331beccb961fe1e110dc25556b38bcdbf399a823a8a375976cd8889668156a9561e12ebe3fa6a4c6ba169c8466c2ff868 + checksum: 10c0/2d7df55279060bf0568549e1ffc9b84bc32a32b7541675ca092dce56317cdd1a59a98dcc4072c9f6a980779440139a3221d7486f52c488e69dc0fd27b1efb162 languageName: node linkType: hard "get-npm-tarball-url@npm:^2.0.3": version: 2.1.0 resolution: "get-npm-tarball-url@npm:2.1.0" - checksum: 02b96993ad5a04cbd0ef0577ac3cc9e2e78a7c60db6bb5e6c8fe78950fc1fc3d093314987629a2fda3083228d91a93670bde321767ca2cf89ce7f463c9e44071 + checksum: 
10c0/af779fa5b9c89a3deaf9640630a23368f5ba6a028a1179872aaf581a59485fb2c2c6bd9b94670de228cfc5f23600c89a01e594879085f7fb4dddf820a63105b8 languageName: node linkType: hard "get-package-type@npm:^0.1.0": version: 0.1.0 resolution: "get-package-type@npm:0.1.0" - checksum: bba0811116d11e56d702682ddef7c73ba3481f114590e705fc549f4d868972263896af313c57a25c076e3c0d567e11d919a64ba1b30c879be985fc9d44f96148 + checksum: 10c0/e34cdf447fdf1902a1f6d5af737eaadf606d2ee3518287abde8910e04159368c268568174b2e71102b87b26c2020486f126bfca9c4fb1ceb986ff99b52ecd1be languageName: node linkType: hard "get-port@npm:^5.1.1": version: 5.1.1 resolution: "get-port@npm:5.1.1" - checksum: 0162663ffe5c09e748cd79d97b74cd70e5a5c84b760a475ce5767b357fb2a57cb821cee412d646aa8a156ed39b78aab88974eddaa9e5ee926173c036c0713787 + checksum: 10c0/2873877a469b24e6d5e0be490724a17edb39fafc795d1d662e7bea951ca649713b4a50117a473f9d162312cb0e946597bd0e049ed2f866e79e576e8e213d3d1c languageName: node linkType: hard "get-stream@npm:^6.0.0": version: 6.0.1 resolution: "get-stream@npm:6.0.1" - checksum: 781266d29725f35c59f1d214aedc92b0ae855800a980800e2923b3fbc4e56b3cb6e462c42e09a1cf1a00c64e056a78fa407cbe06c7c92b7e5cd49b4b85c2a497 + checksum: 10c0/49825d57d3fd6964228e6200a58169464b8e8970489b3acdc24906c782fb7f01f9f56f8e6653c4a50713771d6658f7cfe051e5eb8c12e334138c9c918b296341 languageName: node linkType: hard "get-stream@npm:^8.0.1": version: 8.0.1 resolution: "get-stream@npm:8.0.1" - checksum: dde5511e2e65a48e9af80fea64aff11b4921b14b6e874c6f8294c50975095af08f41bfb0b680c887f28b566dd6ec2cb2f960f9d36a323359be324ce98b766e9e + checksum: 10c0/5c2181e98202b9dae0bb4a849979291043e5892eb40312b47f0c22b9414fc9b28a3b6063d2375705eb24abc41ecf97894d9a51f64ff021511b504477b27b4290 languageName: node linkType: hard @@ -8148,7 +8256,7 @@ __metadata: call-bind: "npm:^1.0.5" es-errors: "npm:^1.3.0" get-intrinsic: "npm:^1.2.4" - checksum: 
e1cb53bc211f9dbe9691a4f97a46837a553c4e7caadd0488dc24ac694db8a390b93edd412b48dcdd0b4bbb4c595de1709effc75fc87c0839deedc6968f5bd973 + checksum: 10c0/867be6d63f5e0eb026cb3b0ef695ec9ecf9310febb041072d2e142f260bd91ced9eeb426b3af98791d1064e324e653424afa6fd1af17dee373bea48ae03162bc languageName: node linkType: hard @@ -8166,14 +8274,14 @@ __metadata: tar: "npm:^6.2.0" bin: giget: dist/cli.mjs - checksum: 5d50c70754fef1f199547fc58ad8ad18fed7f4ee3a2e624827d3f214476b731492ee96bd14934ae23b863524369801b23fd0785028576837be0c23bf2031c2b7 + checksum: 10c0/7a2a66146278f36a1fe0e57e792d43500a757c9a70e796a84e264cf4dfdbc3677499b308dfd96dd53940b5d1065ee14cba75dd75d78a78c2a9abec74e5e4ea62 languageName: node linkType: hard "github-slugger@npm:^1.0.0": version: 1.5.0 resolution: "github-slugger@npm:1.5.0" - checksum: c70988224578b3bdaa25df65973ffc8c24594a77a28550c3636e495e49d17aef5cdb04c04fa3f1744babef98c61eecc6a43299a13ea7f3cc33d680bf9053ffbe + checksum: 10c0/116f99732925f939cbfd6f2e57db1aa7e111a460db0d103e3b3f2fce6909d44311663d4542350706cad806345b9892358cc3b153674f88eeae77f43380b3bfca languageName: node linkType: hard @@ -8182,7 +8290,7 @@ __metadata: resolution: "glob-parent@npm:5.1.2" dependencies: is-glob: "npm:^4.0.1" - checksum: 32cd106ce8c0d83731966d31517adb766d02c3812de49c30cfe0675c7c0ae6630c11214c54a5ae67aca882cf738d27fd7768f21aa19118b9245950554be07247 + checksum: 10c0/cab87638e2112bee3f839ef5f6e0765057163d39c66be8ec1602f3823da4692297ad4e972de876ea17c44d652978638d2fd583c6713d0eb6591706825020c9ee languageName: node linkType: hard @@ -8191,7 +8299,7 @@ __metadata: resolution: "glob-parent@npm:6.0.2" dependencies: is-glob: "npm:^4.0.3" - checksum: c13ee97978bef4f55106b71e66428eb1512e71a7466ba49025fc2aec59a5bfb0954d5abd58fc5ee6c9b076eef4e1f6d3375c2e964b88466ca390da4419a786a8 + checksum: 10c0/317034d88654730230b3f43bb7ad4f7c90257a426e872ea0bf157473ac61c99bf5d205fad8f0185f989be8d2fa6d3c7dce1645d99d545b6ea9089c39f838e7f8 languageName: node linkType: hard @@ -8202,14 +8310,14 @@ 
__metadata: "@types/glob": "npm:^7.1.3" peerDependencies: glob: ^7.1.6 - checksum: c1a3d95f7c8393e4151d4899ec4e42bb2e8237160f840ad1eccbe9247407da8b6c13e28f463022e011708bc40862db87b9b77236d35afa3feb8aa86d518f2dfe + checksum: 10c0/3eb01bed2901539365df6a4d27800afb8788840647d01f9bf3500b3de756597f2ff4b8c823971ace34db228c83159beca459dc42a70968d4e9c8200ed2cc96bd languageName: node linkType: hard "glob-to-regexp@npm:^0.4.1": version: 0.4.1 resolution: "glob-to-regexp@npm:0.4.1" - checksum: 9009529195a955c40d7b9690794aeff5ba665cc38f1519e111c58bb54366fd0c106bde80acf97ba4e533208eb53422c83b136611a54c5fefb1edd8dc267cb62e + checksum: 10c0/0486925072d7a916f052842772b61c3e86247f0a80cc0deb9b5a3e8a1a9faad5b04fb6f58986a09f34d3e96cd2a22a24b7e9882fb1cf904c31e9a310de96c429 languageName: node linkType: hard @@ -8224,7 +8332,7 @@ __metadata: path-scurry: "npm:^1.10.1" bin: glob: dist/esm/bin.mjs - checksum: 38bdb2c9ce75eb5ed168f309d4ed05b0798f640b637034800a6bf306f39d35409bf278b0eaaffaec07591085d3acb7184a201eae791468f0f617771c2486a6a8 + checksum: 10c0/13d8a1feb7eac7945f8c8480e11cd4a44b24d26503d99a8d8ac8d5aefbf3e9802a2b6087318a829fad04cb4e829f25c5f4f1110c68966c498720dd261c7e344d languageName: node linkType: hard @@ -8238,14 +8346,14 @@ __metadata: minimatch: "npm:^3.1.1" once: "npm:^1.3.0" path-is-absolute: "npm:^1.0.0" - checksum: 59452a9202c81d4508a43b8af7082ca5c76452b9fcc4a9ab17655822e6ce9b21d4f8fbadabe4fe3faef448294cec249af305e2cd824b7e9aaf689240e5e96a7b + checksum: 10c0/65676153e2b0c9095100fe7f25a778bf45608eeb32c6048cf307f579649bcc30353277b3b898a3792602c65764e5baa4f643714dfbdfd64ea271d210c7a425fe languageName: node linkType: hard "globals@npm:^11.1.0": version: 11.12.0 resolution: "globals@npm:11.12.0" - checksum: 9f054fa38ff8de8fa356502eb9d2dae0c928217b8b5c8de1f09f5c9b6c8a96d8b9bd3afc49acbcd384a98a81fea713c859e1b09e214c60509517bb8fc2bc13c2 + checksum: 10c0/758f9f258e7b19226bd8d4af5d3b0dcf7038780fb23d82e6f98932c44e239f884847f1766e8fa9cc5635ccb3204f7fa7314d4408dd4002a5e8ea827b4018f0a1 
languageName: node linkType: hard @@ -8254,7 +8362,7 @@ __metadata: resolution: "globals@npm:13.24.0" dependencies: type-fest: "npm:^0.20.2" - checksum: 62c5b1997d06674fc7191d3e01e324d3eda4d65ac9cc4e78329fa3b5c4fd42a0e1c8722822497a6964eee075255ce21ccf1eec2d83f92ef3f06653af4d0ee28e + checksum: 10c0/d3c11aeea898eb83d5ec7a99508600fbe8f83d2cf00cbb77f873dbf2bcb39428eff1b538e4915c993d8a3b3473fa71eeebfe22c9bb3a3003d1e26b1f2c8a42cd languageName: node linkType: hard @@ -8263,7 +8371,7 @@ __metadata: resolution: "globalthis@npm:1.0.3" dependencies: define-properties: "npm:^1.1.3" - checksum: 45ae2f3b40a186600d0368f2a880ae257e8278b4c7704f0417d6024105ad7f7a393661c5c2fa1334669cd485ea44bc883a08fdd4516df2428aec40c99f52aa89 + checksum: 10c0/0db6e9af102a5254630351557ac15e6909bc7459d3e3f6b001e59fe784c96d31108818f032d9095739355a88467459e6488ff16584ee6250cd8c27dec05af4b0 languageName: node linkType: hard @@ -8277,14 +8385,14 @@ __metadata: ignore: "npm:^5.2.0" merge2: "npm:^1.4.1" slash: "npm:^3.0.0" - checksum: 288e95e310227bbe037076ea81b7c2598ccbc3122d87abc6dab39e1eec309aa14f0e366a98cdc45237ffcfcbad3db597778c0068217dcb1950fef6249104e1b1 + checksum: 10c0/b39511b4afe4bd8a7aead3a27c4ade2b9968649abab0a6c28b1a90141b96ca68ca5db1302f7c7bd29eab66bf51e13916b8e0a3d0ac08f75e1e84a39b35691189 languageName: node linkType: hard "globrex@npm:^0.1.2": version: 0.1.2 resolution: "globrex@npm:0.1.2" - checksum: 81ce62ee6f800d823d6b7da7687f841676d60ee8f51f934ddd862e4057316d26665c4edc0358d4340a923ac00a514f8b67c787e28fe693aae16350f4e60d55e9 + checksum: 10c0/a54c029520cf58bda1d8884f72bd49b4cd74e977883268d931fd83bcbd1a9eb96d57c7dbd4ad80148fb9247467ebfb9b215630b2ed7563b2a8de02e1ff7f89d1 languageName: node linkType: hard @@ -8293,21 +8401,21 @@ __metadata: resolution: "gopd@npm:1.0.1" dependencies: get-intrinsic: "npm:^1.1.3" - checksum: 5fbc7ad57b368ae4cd2f41214bd947b045c1a4be2f194a7be1778d71f8af9dbf4004221f3b6f23e30820eb0d052b4f819fe6ebe8221e2a3c6f0ee4ef173421ca + checksum: 
10c0/505c05487f7944c552cee72087bf1567debb470d4355b1335f2c262d218ebbff805cd3715448fe29b4b380bae6912561d0467233e4165830efd28da241418c63 languageName: node linkType: hard "graceful-fs@npm:^4.1.11, graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": version: 4.2.11 resolution: "graceful-fs@npm:4.2.11" - checksum: bf152d0ed1dc159239db1ba1f74fdbc40cb02f626770dcd5815c427ce0688c2635a06ed69af364396da4636d0408fcf7d4afdf7881724c3307e46aff30ca49e2 + checksum: 10c0/386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 languageName: node linkType: hard "graphemer@npm:^1.4.0": version: 1.4.0 resolution: "graphemer@npm:1.4.0" - checksum: 6dd60dba97007b21e3a829fab3f771803cc1292977fe610e240ea72afd67e5690ac9eeaafc4a99710e78962e5936ab5a460787c2a1180f1cb0ccfac37d29f897 + checksum: 10c0/e951259d8cd2e0d196c72ec711add7115d42eb9a8146c8eeda5b8d3ac91e5dd816b9cd68920726d9fd4490368e7ed86e9c423f40db87e2d8dfafa00fa17c3a31 languageName: node linkType: hard @@ -8323,7 +8431,7 @@ __metadata: through2: "npm:^2.0.3" bin: gunzip-maybe: bin.js - checksum: 82a4eadb617e50ac63cb88b3c1ebef0f85de702c0c2031c5d9c0575837e1eef7c94fa4ad69ca4aec2dc3d939c89054ec07c91c233648433058efa7d44354d456 + checksum: 10c0/42798a8061759885c2084e1804e51313d14f2dc9cf6c137e222953ec802f914e592d6f9dbf6ad67f4e78eb036e86db017d9c7c93bb23e90cd5ae09326296ed77 languageName: node linkType: hard @@ -8341,35 +8449,35 @@ __metadata: optional: true bin: handlebars: bin/handlebars - checksum: bd528f4dd150adf67f3f857118ef0fa43ff79a153b1d943fa0a770f2599e38b25a7a0dbac1a3611a4ec86970fd2325a81310fb788b5c892308c9f8743bd02e11 + checksum: 10c0/7aff423ea38a14bb379316f3857fe0df3c5d66119270944247f155ba1f08e07a92b340c58edaa00cfe985c21508870ee5183e0634dcb53dd405f35c93ef7f10d languageName: node linkType: hard "harmony-reflect@npm:^1.4.6": version: 1.6.2 resolution: "harmony-reflect@npm:1.6.2" 
- checksum: 69d30ebfb5dbd6ff0553725c7922404cf1dfe5390db1618298eed27fe6c9bd2f3f677727e9da969d21648f4a6a39041e2f46e99976be4385f9e34bac23058cd4 + checksum: 10c0/fa5b251fbeff0e2d925f0bfb5ffe39e0627639e998c453562d6a39e41789c15499649dc022178c807cf99bfb97e7b974bbbc031ba82078a26be7b098b9bc2b1a languageName: node linkType: hard "has-bigints@npm:^1.0.1, has-bigints@npm:^1.0.2": version: 1.0.2 resolution: "has-bigints@npm:1.0.2" - checksum: 4e0426c900af034d12db14abfece02ce7dbf53f2022d28af1a97913ff4c07adb8799476d57dc44fbca0e07d1dbda2a042c2928b1f33d3f09c15de0640a7fb81b + checksum: 10c0/724eb1485bfa3cdff6f18d95130aa190561f00b3fcf9f19dc640baf8176b5917c143b81ec2123f8cddb6c05164a198c94b13e1377c497705ccc8e1a80306e83b languageName: node linkType: hard "has-flag@npm:^3.0.0": version: 3.0.0 resolution: "has-flag@npm:3.0.0" - checksum: 4a15638b454bf086c8148979aae044dd6e39d63904cd452d970374fa6a87623423da485dfb814e7be882e05c096a7ccf1ebd48e7e7501d0208d8384ff4dea73b + checksum: 10c0/1c6c83b14b8b1b3c25b0727b8ba3e3b647f99e9e6e13eb7322107261de07a4c1be56fc0d45678fc376e09772a3a1642ccdaf8fc69bdf123b6c086598397ce473 languageName: node linkType: hard "has-flag@npm:^4.0.0": version: 4.0.0 resolution: "has-flag@npm:4.0.0" - checksum: 261a1357037ead75e338156b1f9452c016a37dcd3283a972a30d9e4a87441ba372c8b81f818cd0fbcd9c0354b4ae7e18b9e1afa1971164aef6d18c2b6095a8ad + checksum: 10c0/2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 languageName: node linkType: hard @@ -8378,21 +8486,21 @@ __metadata: resolution: "has-property-descriptors@npm:1.0.2" dependencies: es-define-property: "npm:^1.0.0" - checksum: 2d8c9ab8cebb572e3362f7d06139a4592105983d4317e68f7adba320fe6ddfc8874581e0971e899e633fd5f72e262830edce36d5a0bc863dad17ad20572484b2 + checksum: 10c0/253c1f59e80bb476cf0dde8ff5284505d90c3bdb762983c3514d36414290475fe3fd6f574929d84de2a8eec00d35cf07cb6776205ff32efd7c50719125f00236 languageName: node linkType: hard 
"has-proto@npm:^1.0.1, has-proto@npm:^1.0.3": version: 1.0.3 resolution: "has-proto@npm:1.0.3" - checksum: 0b67c2c94e3bea37db3e412e3c41f79d59259875e636ba471e94c009cdfb1fa82bf045deeffafc7dbb9c148e36cae6b467055aaa5d9fad4316e11b41e3ba551a + checksum: 10c0/35a6989f81e9f8022c2f4027f8b48a552de714938765d019dbea6bb547bd49ce5010a3c7c32ec6ddac6e48fc546166a3583b128f5a7add8b058a6d8b4afec205 languageName: node linkType: hard "has-symbols@npm:^1.0.2, has-symbols@npm:^1.0.3": version: 1.0.3 resolution: "has-symbols@npm:1.0.3" - checksum: 464f97a8202a7690dadd026e6d73b1ceeddd60fe6acfd06151106f050303eaa75855aaa94969df8015c11ff7c505f196114d22f7386b4a471038da5874cf5e9b + checksum: 10c0/e6922b4345a3f37069cdfe8600febbca791c94988c01af3394d86ca3360b4b93928bbf395859158f88099cb10b19d98e3bbab7c9ff2c1bd09cf665ee90afa2c3 languageName: node linkType: hard @@ -8401,7 +8509,7 @@ __metadata: resolution: "has-tostringtag@npm:1.0.2" dependencies: has-symbols: "npm:^1.0.3" - checksum: c74c5f5ceee3c8a5b8bc37719840dc3749f5b0306d818974141dda2471a1a2ca6c8e46b9d6ac222c5345df7a901c9b6f350b1e6d62763fec877e26609a401bfe + checksum: 10c0/a8b166462192bafe3d9b6e420a1d581d93dd867adb61be223a17a8d6dad147aa77a8be32c961bb2f27b3ef893cae8d36f564ab651f5e9b7938ae86f74027c48c languageName: node linkType: hard @@ -8410,14 +8518,14 @@ __metadata: resolution: "hasown@npm:2.0.1" dependencies: function-bind: "npm:^1.1.2" - checksum: b7f9107387ee68abed88e965c2b99e868b5e0e9d289db1ddd080706ffafb69533b4f538b0e6362585bae8d6cbd080249f65e79702f74c225990f66d6106be3f6 + checksum: 10c0/9e27e70e8e4204f4124c8f99950d1ba2b1f5174864fd39ff26da190f9ea6488c1b3927dcc64981c26d1f637a971783c9489d62c829d393ea509e6f1ba20370bb languageName: node linkType: hard "hast-util-parse-selector@npm:^2.0.0": version: 2.2.5 resolution: "hast-util-parse-selector@npm:2.2.5" - checksum: 22ee4afbd11754562144cb3c4f3ec52524dafba4d90ee52512902d17cf11066d83b38f7bdf6ca571bbc2541f07ba30db0d234657b6ecb8ca4631587466459605 + checksum: 
10c0/29b7ee77960ded6a99d30c287d922243071cc07b39f2006f203bd08ee54eb8f66bdaa86ef6527477c766e2382d520b60ee4e4087f189888c35d8bcc020173648 languageName: node linkType: hard @@ -8426,7 +8534,7 @@ __metadata: resolution: "hast-util-parse-selector@npm:3.1.1" dependencies: "@types/hast": "npm:^2.0.0" - checksum: 511d373465f60dd65e924f88bf0954085f4fb6e3a2b062a4b5ac43b93cbfd36a8dce6234b5d1e3e63499d936375687e83fc5da55628b22bd6b581b5ee167d1c4 + checksum: 10c0/34ac1707a477fd9764e328087163f1f21857bdb0f8d425bf41f6def7baf840e50e4bca2eb03072e3da4e39856de28893c4b688dcba0cc305160d53afcece4df4 languageName: node linkType: hard @@ -8439,7 +8547,7 @@ __metadata: hast-util-parse-selector: "npm:^2.0.0" property-information: "npm:^5.0.0" space-separated-tokens: "npm:^1.0.0" - checksum: 78f91b71e50506f7499c8275d67645f9f4f130e6f12b038853261d1fa7393432da4113baf3508c41b79d933f255089d6d593beea9d4cda89dfd34d0a498cf378 + checksum: 10c0/f76d9cf373cb075c8523c8ad52709f09f7e02b7c9d3152b8d35c65c265b9f1878bed6023f215a7d16523921036d40a7da292cb6f4399af9b5eccac2a5a5eb330 languageName: node linkType: hard @@ -8452,14 +8560,14 @@ __metadata: hast-util-parse-selector: "npm:^3.0.0" property-information: "npm:^6.0.0" space-separated-tokens: "npm:^2.0.0" - checksum: 98740e0b69b4765a23d0174fb93eb1c1bdcae6a9f1c9e1b07de6aca2d578427a42e1d45ee98eda26463ac58ff73a8ce45af19c4eb8b5f6f768a9c8543964d28f + checksum: 10c0/579912b03ff4a5b19eb609df7403c6dba2505ef1a1e2bc47cbf467cbd7cffcd51df40e74d882de1ccdda40aaf18487f82619eb9cb9f2077cba778017e95e868e languageName: node linkType: hard "highlight.js@npm:^10.4.1, highlight.js@npm:~10.7.0": version: 10.7.3 resolution: "highlight.js@npm:10.7.3" - checksum: db8d10a541936b058e221dbde77869664b2b45bca75d660aa98065be2cd29f3924755fbc7348213f17fd931aefb6e6597448ba6fe82afba6d8313747a91983ee + checksum: 10c0/073837eaf816922427a9005c56c42ad8786473dc042332dfe7901aa065e92bc3d94ebf704975257526482066abb2c8677cc0326559bb8621e046c21c5991c434 languageName: node linkType: hard @@ -8468,14 +8576,14 
@@ __metadata: resolution: "hoist-non-react-statics@npm:3.3.2" dependencies: react-is: "npm:^16.7.0" - checksum: 1acbe85f33e5a39f90c822ad4d28b24daeb60f71c545279431dc98c312cd28a54f8d64788e477fe21dc502b0e3cf58589ebe5c1ad22af27245370391c2d24ea6 + checksum: 10c0/fe0889169e845d738b59b64badf5e55fa3cf20454f9203d1eb088df322d49d4318df774828e789898dcb280e8a5521bb59b3203385662ca5e9218a6ca5820e74 languageName: node linkType: hard "hosted-git-info@npm:^2.1.4": version: 2.8.9 resolution: "hosted-git-info@npm:2.8.9" - checksum: 96da7d412303704af41c3819207a09ea2cab2de97951db4cf336bb8bce8d8e36b9a6821036ad2e55e67d3be0af8f967a7b57981203fbfb88bc05cd803407b8c3 + checksum: 10c0/317cbc6b1bbbe23c2a40ae23f3dafe9fa349ce42a89a36f930e3f9c0530c179a3882d2ef1e4141a4c3674d6faaea862138ec55b43ad6f75e387fda2483a13c70 languageName: node linkType: hard @@ -8485,7 +8593,7 @@ __metadata: dependencies: domhandler: "npm:5.0.3" htmlparser2: "npm:9.1.0" - checksum: fb1f67e151008abc00404e80d69d499ee365b7e8a1a618ad0ccaedfd28889c99fb8fcd0cf57464c7b0898784c28b9c0ef9704110d51bca6ff2e5367e5a0d13a3 + checksum: 10c0/a0fcd84e0729c7b18c5df03ac0d2de6feae61846e746a7aab11315d6eb3e2352d12897cb8af7667b493a55f3fed066550037d8c37efaa1fe05ebedb28921b7d1 languageName: node linkType: hard @@ -8494,14 +8602,14 @@ __metadata: resolution: "html-encoding-sniffer@npm:3.0.0" dependencies: whatwg-encoding: "npm:^2.0.0" - checksum: 707a812ec2acaf8bb5614c8618dc81e2fb6b4399d03e95ff18b65679989a072f4e919b9bef472039301a1bbfba64063ba4c79ea6e851c653ac9db80dbefe8fe5 + checksum: 10c0/b17b3b0fb5d061d8eb15121c3b0b536376c3e295ecaf09ba48dd69c6b6c957839db124fe1e2b3f11329753a4ee01aa7dedf63b7677999e86da17fbbdd82c5386 languageName: node linkType: hard "html-escaper@npm:^2.0.0": version: 2.0.2 resolution: "html-escaper@npm:2.0.2" - checksum: 034d74029dcca544a34fb6135e98d427acd73019796ffc17383eaa3ec2fe1c0471dcbbc8f8ed39e46e86d43ccd753a160631615e4048285e313569609b66d5b7 + checksum: 
10c0/208e8a12de1a6569edbb14544f4567e6ce8ecc30b9394fcaa4e7bb1e60c12a7c9a1ed27e31290817157e8626f3a4f29e76c8747030822eb84a6abb15c255f0a0 languageName: node linkType: hard @@ -8519,14 +8627,14 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: d52c121d043d20402c0c768d38f85f89dfa280191acad7367c5c86f7cbd9de67899856b6ff9fbc113143ce22756393dc0452851b9784cc8cd0473f2a813c47a6 + checksum: 10c0/e761dc1db61dcc384a5d6fffedb3d83b4a6793eda36551394d0907cb8afe75ac1b5ca36b26bdd054da7940c920d278837bd9c34ccd39c29da4ebc4b016c0a96d languageName: node linkType: hard "html-tags@npm:^3.1.0": version: 3.3.1 resolution: "html-tags@npm:3.3.1" - checksum: d0e808544b92d8b999cbcc86d539577255a2f0f2f4f73110d10749d1d36e6fe6ad706a0355a8477afb6e000ecdc93d8455b3602951f9a2b694ac9e28f1b52878 + checksum: 10c0/680165e12baa51bad7397452d247dbcc5a5c29dac0e6754b1187eee3bf26f514bc1907a431dd2f7eb56207611ae595ee76a0acc8eaa0d931e72c791dd6463d79 languageName: node linkType: hard @@ -8538,14 +8646,14 @@ __metadata: domhandler: "npm:^5.0.3" domutils: "npm:^3.1.0" entities: "npm:^4.5.0" - checksum: 6352fa2a5495781fa9a02c9049908334cd068ff36d753870d30cd13b841e99c19646717567a2f9e9c44075bbe43d364e102f9d013a731ce962226d63746b794f + checksum: 10c0/394f6323efc265bbc791d8c0d96bfe95984e0407565248521ab92e2dc7668e5ceeca7bc6ed18d408b9ee3b25032c5743368a4280d280332d782821d5d467ad8f languageName: node linkType: hard "http-cache-semantics@npm:^4.1.1": version: 4.1.1 resolution: "http-cache-semantics@npm:4.1.1" - checksum: 362d5ed66b12ceb9c0a328fb31200b590ab1b02f4a254a697dc796850cc4385603e75f53ec59f768b2dad3bfa1464bd229f7de278d2899a0e3beffc634b6683f + checksum: 10c0/ce1319b8a382eb3cbb4a37c19f6bfe14e5bb5be3d09079e885e8c513ab2d3cd9214902f8a31c9dc4e37022633ceabfc2d697405deeaf1b8f3552bb4ed996fdfc languageName: node linkType: hard @@ -8558,7 +8666,7 @@ __metadata: setprototypeof: "npm:1.2.0" statuses: "npm:2.0.1" toidentifier: "npm:1.0.1" - checksum: 
0e7f76ee8ff8a33e58a3281a469815b893c41357378f408be8f6d4aa7d1efafb0da064625518e7078381b6a92325949b119dc38fcb30bdbc4e3a35f78c44c439 + checksum: 10c0/fc6f2715fe188d091274b5ffc8b3657bd85c63e969daa68ccb77afb05b071a4b62841acb7a21e417b5539014dff2ebf9550f0b14a9ff126f2734a7c1387f8e19 languageName: node linkType: hard @@ -8569,7 +8677,7 @@ __metadata: "@tootallnate/once": "npm:2" agent-base: "npm:6" debug: "npm:4" - checksum: 5ee19423bc3e0fd5f23ce991b0755699ad2a46a440ce9cec99e8126bb98448ad3479d2c0ea54be5519db5b19a4ffaa69616bac01540db18506dd4dac3dc418f0 + checksum: 10c0/32a05e413430b2c1e542e5c74b38a9f14865301dd69dff2e53ddb684989440e3d2ce0c4b64d25eb63cf6283e6265ff979a61cf93e3ca3d23047ddfdc8df34a32 languageName: node linkType: hard @@ -8579,7 +8687,7 @@ __metadata: dependencies: agent-base: "npm:^7.1.0" debug: "npm:^4.3.4" - checksum: d062acfa0cb82beeb558f1043c6ba770ea892b5fb7b28654dbc70ea2aeea55226dd34c02a294f6c1ca179a5aa483c4ea641846821b182edbd9cc5d89b54c6848 + checksum: 10c0/4207b06a4580fb85dd6dff521f0abf6db517489e70863dca1a0291daa7f2d3d2d6015a57bd702af068ea5cf9f1f6ff72314f5f5b4228d299c0904135d2aef921 languageName: node linkType: hard @@ -8589,7 +8697,7 @@ __metadata: dependencies: agent-base: "npm:5" debug: "npm:4" - checksum: e90ca77ec10ef9987ad464853dfee744fb13fb02ad72f31c770ba09fb55675206a1de3c8b7e74d809fc00ed3baa7e01a48c569a419a675bfa3ef1ee975822b70 + checksum: 10c0/fbba3e037ec04e1850e867064a763b86dd884baae9c5f4ad380504e321068c9e9b5de79cf2f3a28ede7c36036dce905b58d9f51703c5b3884d887114f4887f77 languageName: node linkType: hard @@ -8599,7 +8707,7 @@ __metadata: dependencies: agent-base: "npm:6" debug: "npm:4" - checksum: f0dce7bdcac5e8eaa0be3c7368bb8836ed010fb5b6349ffb412b172a203efe8f807d9a6681319105ea1b6901e1972c7b5ea899672a7b9aad58309f766dcbe0df + checksum: 10c0/6dd639f03434003577c62b27cafdb864784ef19b2de430d8ae2a1d45e31c4fd60719e5637b44db1a88a046934307da7089e03d6089ec3ddacc1189d8de8897d1 languageName: node linkType: hard @@ -8609,21 +8717,21 @@ __metadata: dependencies: 
agent-base: "npm:^7.0.2" debug: "npm:4" - checksum: 405fe582bba461bfe5c7e2f8d752b384036854488b828ae6df6a587c654299cbb2c50df38c4b6ab303502c3c5e029a793fbaac965d1e86ee0be03faceb554d63 + checksum: 10c0/bc4f7c38da32a5fc622450b6cb49a24ff596f9bd48dcedb52d2da3fa1c1a80e100fb506bd59b326c012f21c863c69b275c23de1a01d0b84db396822fdf25e52b languageName: node linkType: hard "human-signals@npm:^2.1.0": version: 2.1.0 resolution: "human-signals@npm:2.1.0" - checksum: df59be9e0af479036798a881d1f136c4a29e0b518d4abb863afbd11bf30efa3eeb1d0425fc65942dcc05ab3bf40205ea436b0ff389f2cd20b75b8643d539bf86 + checksum: 10c0/695edb3edfcfe9c8b52a76926cd31b36978782062c0ed9b1192b36bebc75c4c87c82e178dfcb0ed0fc27ca59d434198aac0bd0be18f5781ded775604db22304a languageName: node linkType: hard "human-signals@npm:^5.0.0": version: 5.0.0 resolution: "human-signals@npm:5.0.0" - checksum: 30f8870d831cdcd2d6ec0486a7d35d49384996742052cee792854273fa9dd9e7d5db06bb7985d4953e337e10714e994e0302e90dc6848069171b05ec836d65b0 + checksum: 10c0/5a9359073fe17a8b58e5a085e9a39a950366d9f00217c4ff5878bd312e09d80f460536ea6a3f260b5943a01fe55c158d1cea3fc7bee3d0520aeef04f6d915c82 languageName: node linkType: hard @@ -8632,16 +8740,16 @@ __metadata: resolution: "hunspell-spellchecker@npm:1.0.2" bin: hunspell-tojson: ./bin/hunspell-tojson.js - checksum: 84f58980d354f63e81a630370d19cd36790770fb545d4e9025858ed40c0a641b90fae4925cd0b83f766eefafcedc239803c52c617dd8c63b428266bf2ff63a7d + checksum: 10c0/61f6ec11ce65470ae4bba9406fedf2b36dfd0d6a39d56817de9dd5e48847517af9e7719cdf23368af1f33254e60f15be6323707addb66a2b7cb7e2c5c5e60b14 languageName: node linkType: hard -"husky@npm:9.0.11": - version: 9.0.11 - resolution: "husky@npm:9.0.11" +"husky@npm:9.1.6": + version: 9.1.6 + resolution: "husky@npm:9.1.6" bin: - husky: bin.mjs - checksum: 8a9b7cb9dc8494b470b3b47b386e65d579608c6206da80d3cc8b71d10e37947264af3dfe00092368dad9673b51d2a5ee87afb4b2291e77ba9e7ec1ac36e56cd1 + husky: bin.js + checksum: 
10c0/705673db4a247c1febd9c5df5f6a3519106cf0335845027bb50a15fba9b1f542cb2610932ede96fd08008f6d9f49db0f15560509861808b0031cdc0e7c798bac languageName: node linkType: hard @@ -8650,7 +8758,7 @@ __metadata: resolution: "iconv-lite@npm:0.4.24" dependencies: safer-buffer: "npm:>= 2.1.2 < 3" - checksum: 6d3a2dac6e5d1fb126d25645c25c3a1209f70cceecc68b8ef51ae0da3cdc078c151fade7524a30b12a3094926336831fca09c666ef55b37e2c69638b5d6bd2e3 + checksum: 10c0/c6886a24cc00f2a059767440ec1bc00d334a89f250db8e0f7feb4961c8727118457e27c495ba94d082e51d3baca378726cd110aaf7ded8b9bbfd6a44760cf1d4 languageName: node linkType: hard @@ -8659,7 +8767,7 @@ __metadata: resolution: "iconv-lite@npm:0.6.3" dependencies: safer-buffer: "npm:>= 2.1.2 < 3.0.0" - checksum: 24e3292dd3dadaa81d065c6f8c41b274a47098150d444b96e5f53b4638a9a71482921ea6a91a1f59bb71d9796de25e04afd05919fa64c360347ba65d3766f10f + checksum: 10c0/98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1 languageName: node linkType: hard @@ -8668,35 +8776,35 @@ __metadata: resolution: "identity-obj-proxy@npm:3.0.0" dependencies: harmony-reflect: "npm:^1.4.6" - checksum: 66fe4d2ffc67655174f6abe100ab3b36d2f5e4de5b28a7c3121e5f51bd4e7c8c1bee4f9a41ce0586ace57fb63bfedbfc39508b7cb43b9e3ed6dc42f762158b4e + checksum: 10c0/a3fc4de0042d7b45bf8652d5596c80b42139d8625c9cd6a8834e29e1b6dce8fccabd1228e08744b78677a19ceed7201a32fed8ca3dc3e4852e8fee24360a6cfc languageName: node linkType: hard "ieee754@npm:^1.1.13": version: 1.2.1 resolution: "ieee754@npm:1.2.1" - checksum: d9f2557a59036f16c282aaeb107832dc957a93d73397d89bbad4eb1130560560eb695060145e8e6b3b498b15ab95510226649a0b8f52ae06583575419fe10fc4 + checksum: 10c0/b0782ef5e0935b9f12883a2e2aa37baa75da6e66ce6515c168697b42160807d9330de9a32ec1ed73149aea02e0d822e572bca6f1e22bdcbd2149e13b050b17bb languageName: node linkType: hard "ignore@npm:^5.2.0, ignore@npm:^5.2.4": version: 5.3.1 resolution: "ignore@npm:5.3.1" - checksum: 
0a884c2fbc8c316f0b9f92beaf84464253b73230a4d4d286697be45fca081199191ca33e1c2e82d9e5f851f5e9a48a78e25a35c951e7eb41e59f150db3530065 + checksum: 10c0/703f7f45ffb2a27fb2c5a8db0c32e7dee66b33a225d28e8db4e1be6474795f606686a6e3bcc50e1aa12f2042db4c9d4a7d60af3250511de74620fbed052ea4cd languageName: node linkType: hard "immer@npm:^9.0.21": version: 9.0.21 resolution: "immer@npm:9.0.21" - checksum: 8455d6b4dc8abfe40f06eeec9bcc944d147c81279424c0f927a4d4905ae34e5af19ab6da60bcc700c14f51c452867d7089b3b9236f5a9a2248e39b4a09ee89de + checksum: 10c0/03ea3ed5d4d72e8bd428df4a38ad7e483ea8308e9a113d3b42e0ea2cc0cc38340eb0a6aca69592abbbf047c685dbda04e3d34bf2ff438ab57339ed0a34cc0a05 languageName: node linkType: hard "immutable@npm:^4.0.0": version: 4.3.5 resolution: "immutable@npm:4.3.5" - checksum: dbc1b8c808b9aa18bfce2e0c7bc23714a47267bc311f082145cc9220b2005e9b9cd2ae78330f164a19266a2b0f78846c60f4f74893853ac16fd68b5ae57092d2 + checksum: 10c0/63d2d7908241a955d18c7822fd2215b6e89ff5a1a33cc72cd475b013cbbdef7a705aa5170a51ce9f84a57f62fdddfaa34e7b5a14b33d8a43c65cc6a881d6e894 languageName: node linkType: hard @@ -8706,7 +8814,7 @@ __metadata: dependencies: parent-module: "npm:^1.0.0" resolve-from: "npm:^4.0.0" - checksum: 2cacfad06e652b1edc50be650f7ec3be08c5e5a6f6d12d035c440a42a8cc028e60a5b99ca08a77ab4d6b1346da7d971915828f33cdab730d3d42f08242d09baa + checksum: 10c0/7f882953aa6b740d1f0e384d0547158bc86efbf2eea0f1483b8900a6f65c5a5123c2cf09b0d542cc419d0b98a759ecaeb394237e97ea427f2da221dc3cd80cc3 languageName: node linkType: hard @@ -8718,21 +8826,21 @@ __metadata: resolve-cwd: "npm:^3.0.0" bin: import-local-fixture: fixtures/cli.js - checksum: bfcdb63b5e3c0e245e347f3107564035b128a414c4da1172a20dc67db2504e05ede4ac2eee1252359f78b0bfd7b19ef180aec427c2fce6493ae782d73a04cddd + checksum: 10c0/c67ecea72f775fe8684ca3d057e54bdb2ae28c14bf261d2607c269c18ea0da7b730924c06262eca9aed4b8ab31e31d65bc60b50e7296c85908a56e2f7d41ecd2 languageName: node linkType: hard "imurmurhash@npm:^0.1.4": version: 0.1.4 resolution: 
"imurmurhash@npm:0.1.4" - checksum: 2d30b157a91fe1c1d7c6f653cbf263f039be6c5bfa959245a16d4ee191fc0f2af86c08545b6e6beeb041c56b574d2d5b9f95343d378ab49c0f37394d541e7fc8 + checksum: 10c0/8b51313850dd33605c6c9d3fd9638b714f4c4c40250cff658209f30d40da60f78992fb2df5dabee4acf589a6a82bbc79ad5486550754bd9ec4e3fc0d4a57d6a6 languageName: node linkType: hard "indent-string@npm:^4.0.0": version: 4.0.0 resolution: "indent-string@npm:4.0.0" - checksum: cd3f5cbc9ca2d624c6a1f53f12e6b341659aba0e2d3254ae2b4464aaea8b4294cdb09616abbc59458f980531f2429784ed6a420d48d245bcad0811980c9efae9 + checksum: 10c0/1e1904ddb0cb3d6cce7cd09e27a90184908b7a5d5c21b92e232c93579d314f0b83c246ffb035493d0504b1e9147ba2c9b21df0030f48673fba0496ecd698161f languageName: node linkType: hard @@ -8742,21 +8850,21 @@ __metadata: dependencies: once: "npm:^1.3.0" wrappy: "npm:1" - checksum: d2ebd65441a38c8336c223d1b80b921b9fa737e37ea466fd7e253cb000c64ae1f17fa59e68130ef5bda92cfd8d36b83d37dab0eb0a4558bcfec8e8cdfd2dcb67 + checksum: 10c0/7faca22584600a9dc5b9fca2cd5feb7135ac8c935449837b315676b4c90aa4f391ec4f42240178244b5a34e8bede1948627fda392ca3191522fc46b34e985ab2 languageName: node linkType: hard "inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3": version: 2.0.4 resolution: "inherits@npm:2.0.4" - checksum: cd45e923bee15186c07fa4c89db0aace24824c482fb887b528304694b2aa6ff8a898da8657046a5dcf3e46cd6db6c61629551f9215f208d7c3f157cf9b290521 + checksum: 10c0/4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 languageName: node linkType: hard "inline-style-parser@npm:0.2.2": version: 0.2.2 resolution: "inline-style-parser@npm:0.2.2" - checksum: 352b1b9a691113033fc72e67b906244713551dc497d7e12791034668fe7d9e4c9e74eb8c251183d6225d3a263d0bcea911b9ca6281dec0413f6e2465ee8fbc2e + checksum: 
10c0/82099645fd99451301ff243706f70917c066e3033d32bdb1074a54eb1909e08d1cafb48c426a643facbe8248cff362082e90ca14760b3d44e09a858fe668b3fe languageName: node linkType: hard @@ -8767,7 +8875,7 @@ __metadata: es-errors: "npm:^1.3.0" hasown: "npm:^2.0.0" side-channel: "npm:^1.0.4" - checksum: 3e66720508831153ecf37d13def9f6856f9f2960989ec8a0a0476c98f887fca9eff0163127466485cb825c900c2d6fc601aa9117b7783b90ffce23a71ea5d053 + checksum: 10c0/f8b294a4e6ea3855fc59551bbf35f2b832cf01fd5e6e2a97f5c201a071cc09b49048f856e484b67a6c721da5e55736c5b6ddafaf19e2dbeb4a3ff1821680de6c languageName: node linkType: hard @@ -8776,7 +8884,7 @@ __metadata: resolution: "invariant@npm:2.2.4" dependencies: loose-envify: "npm:^1.0.0" - checksum: cc3182d793aad82a8d1f0af697b462939cb46066ec48bbf1707c150ad5fad6406137e91a262022c269702e01621f35ef60269f6c0d7fd178487959809acdfb14 + checksum: 10c0/5af133a917c0bcf65e84e7f23e779e7abc1cd49cb7fdc62d00d1de74b0d8c1b5ee74ac7766099fb3be1b05b26dfc67bab76a17030d2fe7ea2eef867434362dfc languageName: node linkType: hard @@ -8786,42 +8894,42 @@ __metadata: dependencies: jsbn: "npm:1.1.0" sprintf-js: "npm:^1.1.3" - checksum: 1ed81e06721af012306329b31f532b5e24e00cb537be18ddc905a84f19fe8f83a09a1699862bf3a1ec4b9dea93c55a3fa5faf8b5ea380431469df540f38b092c + checksum: 10c0/331cd07fafcb3b24100613e4b53e1a2b4feab11e671e655d46dc09ee233da5011284d09ca40c4ecbdfe1d0004f462958675c224a804259f2f78d2465a87824bc languageName: node linkType: hard "ip@npm:^2.0.1": version: 2.0.1 resolution: "ip@npm:2.0.1" - checksum: d6dd154e1bc5e8725adfdd6fb92218635b9cbe6d873d051bd63b178f009777f751a5eea4c67021723a7056325fc3052f8b6599af0a2d56f042c93e684b4a0349 + checksum: 10c0/cab8eb3e88d0abe23e4724829621ec4c4c5cb41a7f936a2e626c947128c1be16ed543448d42af7cca95379f9892bfcacc1ccd8d09bc7e8bea0e86d492ce33616 languageName: node linkType: hard "ipaddr.js@npm:1.9.1": version: 1.9.1 resolution: "ipaddr.js@npm:1.9.1" - checksum: 
864d0cced0c0832700e9621913a6429ccdc67f37c1bd78fb8c6789fff35c9d167cb329134acad2290497a53336813ab4798d2794fd675d5eb33b5fdf0982b9ca + checksum: 10c0/0486e775047971d3fdb5fb4f063829bac45af299ae0b82dcf3afa2145338e08290563a2a70f34b732d795ecc8311902e541a8530eeb30d75860a78ff4e94ce2a languageName: node linkType: hard "is-absolute-url@npm:^3.0.0": version: 3.0.3 resolution: "is-absolute-url@npm:3.0.3" - checksum: 5159b51d065d9ad29e16a2f78d6c0e41c43227caf90a45e659c54ea6fd50ef0595b1871ce392e84b1df7cfdcad9a8e66eec0813a029112188435abf115accb16 + checksum: 10c0/04c415974c32e73a83d3a21a9bea18fc4e2c14fbe6bbd64832cf1e67a75ade2af0e900f552f0b8a447f1305f5ffc9d143ccd8d005dbe715d198c359d342b86f0 languageName: node linkType: hard "is-alphabetical@npm:^1.0.0": version: 1.0.4 resolution: "is-alphabetical@npm:1.0.4" - checksum: 6508cce44fd348f06705d377b260974f4ce68c74000e7da4045f0d919e568226dc3ce9685c5a2af272195384df6930f748ce9213fc9f399b5d31b362c66312cb + checksum: 10c0/1505b1de5a1fd74022c05fb21b0e683a8f5229366bac8dc4d34cf6935bcfd104d1125a5e6b083fb778847629f76e5bdac538de5367bdf2b927a1356164e23985 languageName: node linkType: hard "is-alphabetical@npm:^2.0.0": version: 2.0.1 resolution: "is-alphabetical@npm:2.0.1" - checksum: 56207db8d9de0850f0cd30f4966bf731eb82cedfe496cbc2e97e7c3bacaf66fc54a972d2d08c0d93bb679cb84976a05d24c5ad63de56fabbfc60aadae312edaa + checksum: 10c0/932367456f17237533fd1fc9fe179df77957271020b83ea31da50e5cc472d35ef6b5fb8147453274ffd251134472ce24eb6f8d8398d96dee98237cdb81a6c9a7 languageName: node linkType: hard @@ -8831,7 +8939,7 @@ __metadata: dependencies: is-alphabetical: "npm:^1.0.0" is-decimal: "npm:^1.0.0" - checksum: e2e491acc16fcf5b363f7c726f666a9538dba0a043665740feb45bba1652457a73441e7c5179c6768a638ed396db3437e9905f403644ec7c468fb41f4813d03f + checksum: 10c0/d623abae7130a7015c6bf33d99151d4e7005572fd170b86568ff4de5ae86ac7096608b87dd4a1d4dbbd497e392b6396930ba76c9297a69455909cebb68005905 languageName: node linkType: hard @@ -8841,7 +8949,7 @@ __metadata: dependencies: 
is-alphabetical: "npm:^2.0.0" is-decimal: "npm:^2.0.0" - checksum: 87acc068008d4c9c4e9f5bd5e251041d42e7a50995c77b1499cf6ed248f971aadeddb11f239cabf09f7975ee58cac7a48ffc170b7890076d8d227b24a68663c9 + checksum: 10c0/4b35c42b18e40d41378293f82a3ecd9de77049b476f748db5697c297f686e1e05b072a6aaae2d16f54d2a57f85b00cbbe755c75f6d583d1c77d6657bd0feb5a2 languageName: node linkType: hard @@ -8851,7 +8959,7 @@ __metadata: dependencies: call-bind: "npm:^1.0.2" has-tostringtag: "npm:^1.0.0" - checksum: a170c7e26082e10de9be6e96d32ae3db4d5906194051b792e85fae3393b53cf2cb5b3557863e5c8ccbab55e2fd8f2f75aa643d437613f72052cf0356615c34be + checksum: 10c0/5ff1f341ee4475350adfc14b2328b38962564b7c2076be2f5bac7bd9b61779efba99b9f844a7b82ba7654adccf8e8eb19d1bb0cc6d1c1a085e498f6793d4328f languageName: node linkType: hard @@ -8861,21 +8969,21 @@ __metadata: dependencies: call-bind: "npm:^1.0.2" get-intrinsic: "npm:^1.2.1" - checksum: 34a26213d981d58b30724ef37a1e0682f4040d580fa9ff58fdfdd3cefcb2287921718c63971c1c404951e7b747c50fdc7caf6e867e951353fa71b369c04c969b + checksum: 10c0/42a49d006cc6130bc5424eae113e948c146f31f9d24460fc0958f855d9d810e6fd2e4519bf19aab75179af9c298ea6092459d8cafdec523cd19e529b26eab860 languageName: node linkType: hard "is-arrayish@npm:^0.2.1": version: 0.2.1 resolution: "is-arrayish@npm:0.2.1" - checksum: 73ced84fa35e59e2c57da2d01e12cd01479f381d7f122ce41dcbb713f09dbfc651315832cd2bf8accba7681a69e4d6f1e03941d94dd10040d415086360e7005e + checksum: 10c0/e7fb686a739068bb70f860b39b67afc62acc62e36bb61c5f965768abce1873b379c563e61dd2adad96ebb7edf6651111b385e490cf508378959b0ed4cac4e729 languageName: node linkType: hard "is-arrayish@npm:^0.3.1": version: 0.3.2 resolution: "is-arrayish@npm:0.3.2" - checksum: 81a78d518ebd8b834523e25d102684ee0f7e98637136d3bdc93fd09636350fa06f1d8ca997ea28143d4d13cb1b69c0824f082db0ac13e1ab3311c10ffea60ade + checksum: 10c0/f59b43dc1d129edb6f0e282595e56477f98c40278a2acdc8b0a5c57097c9eff8fe55470493df5775478cf32a4dc8eaf6d3a749f07ceee5bc263a78b2434f6a54 languageName: 
node linkType: hard @@ -8884,7 +8992,7 @@ __metadata: resolution: "is-async-function@npm:2.0.0" dependencies: has-tostringtag: "npm:^1.0.0" - checksum: 2cf336fbf8cba3badcf526aa3d10384c30bab32615ac4831b74492eb4e843ccb7d8439a119c27f84bcf217d72024e611b1373f870f433b48f3fa57d3d1b863f1 + checksum: 10c0/787bc931576aad525d751fc5ce211960fe91e49ac84a5c22d6ae0bc9541945fbc3f686dc590c3175722ce4f6d7b798a93f6f8ff4847fdb2199aea6f4baf5d668 languageName: node linkType: hard @@ -8893,7 +9001,7 @@ __metadata: resolution: "is-bigint@npm:1.0.4" dependencies: has-bigints: "npm:^1.0.1" - checksum: cc981cf0564c503aaccc1e5f39e994ae16ae2d1a8fcd14721f14ad431809071f39ec568cfceef901cff408045f1a6d6bac90d1b43eeb0b8e3bc34c8eb1bdb4c4 + checksum: 10c0/eb9c88e418a0d195ca545aff2b715c9903d9b0a5033bc5922fec600eb0c3d7b1ee7f882dbf2e0d5a6e694e42391be3683e4368737bd3c4a77f8ac293e7773696 languageName: node linkType: hard @@ -8902,7 +9010,7 @@ __metadata: resolution: "is-binary-path@npm:2.1.0" dependencies: binary-extensions: "npm:^2.0.0" - checksum: 078e51b4f956c2c5fd2b26bb2672c3ccf7e1faff38e0ebdba45612265f4e3d9fc3127a1fa8370bbf09eab61339203c3d3b7af5662cbf8be4030f8fac37745b0e + checksum: 10c0/a16eaee59ae2b315ba36fad5c5dcaf8e49c3e27318f8ab8fa3cdb8772bf559c8d1ba750a589c2ccb096113bb64497084361a25960899cb6172a6925ab6123d38 languageName: node linkType: hard @@ -8912,14 +9020,14 @@ __metadata: dependencies: call-bind: "npm:^1.0.2" has-tostringtag: "npm:^1.0.0" - checksum: ba794223b56a49a9f185e945eeeb6b7833b8ea52a335cec087d08196cf27b538940001615d3bb976511287cefe94e5907d55f00bb49580533f9ca9b4515fcc2e + checksum: 10c0/6090587f8a8a8534c0f816da868bc94f32810f08807aa72fa7e79f7e11c466d281486ffe7a788178809c2aa71fe3e700b167fe80dd96dad68026bfff8ebf39f7 languageName: node linkType: hard "is-callable@npm:^1.1.3, is-callable@npm:^1.1.4, is-callable@npm:^1.2.7": version: 1.2.7 resolution: "is-callable@npm:1.2.7" - checksum: 
48a9297fb92c99e9df48706241a189da362bff3003354aea4048bd5f7b2eb0d823cd16d0a383cece3d76166ba16d85d9659165ac6fcce1ac12e6c649d66dbdb9 + checksum: 10c0/ceebaeb9d92e8adee604076971dd6000d38d6afc40bb843ea8e45c5579b57671c3f3b50d7f04869618242c6cee08d1b67806a8cb8edaaaf7c0748b3720d6066f languageName: node linkType: hard @@ -8928,7 +9036,7 @@ __metadata: resolution: "is-core-module@npm:2.13.1" dependencies: hasown: "npm:^2.0.0" - checksum: d53bd0cc24b0a0351fb4b206ee3908f71b9bbf1c47e9c9e14e5f06d292af1663704d2abd7e67700d6487b2b7864e0d0f6f10a1edf1892864bdffcb197d1845a2 + checksum: 10c0/2cba9903aaa52718f11c4896dabc189bab980870aae86a62dc0d5cedb546896770ee946fb14c84b7adf0735f5eaea4277243f1b95f5cefa90054f92fbcac2518 languageName: node linkType: hard @@ -8937,28 +9045,28 @@ __metadata: resolution: "is-date-object@npm:1.0.5" dependencies: has-tostringtag: "npm:^1.0.0" - checksum: cc80b3a4b42238fa0d358b9a6230dae40548b349e64a477cb7c5eff9b176ba194c11f8321daaf6dd157e44073e9b7fd01f87db1f14952a88d5657acdcd3a56e2 + checksum: 10c0/eed21e5dcc619c48ccef804dfc83a739dbb2abee6ca202838ee1bd5f760fe8d8a93444f0d49012ad19bb7c006186e2884a1b92f6e1c056da7fd23d0a9ad5992e languageName: node linkType: hard "is-decimal@npm:^1.0.0": version: 1.0.4 resolution: "is-decimal@npm:1.0.4" - checksum: ed483a387517856dc395c68403a10201fddcc1b63dc56513fbe2fe86ab38766120090ecdbfed89223d84ca8b1cd28b0641b93cb6597b6e8f4c097a7c24e3fb96 + checksum: 10c0/a4ad53c4c5c4f5a12214e7053b10326711f6a71f0c63ba1314a77bd71df566b778e4ebd29f9fb6815f07a4dc50c3767fb19bd6fc9fa05e601410f1d64ffeac48 languageName: node linkType: hard "is-decimal@npm:^2.0.0": version: 2.0.1 resolution: "is-decimal@npm:2.0.1" - checksum: 97132de7acdce77caa7b797632970a2ecd649a88e715db0e4dbc00ab0708b5e7574ba5903962c860cd4894a14fd12b100c0c4ac8aed445cf6f55c6cf747a4158 + checksum: 10c0/8085dd66f7d82f9de818fba48b9e9c0429cb4291824e6c5f2622e96b9680b54a07a624cfc663b24148b8e853c62a1c987cfe8b0b5a13f5156991afaf6736e334 languageName: node linkType: hard "is-deflate@npm:^1.0.0": 
version: 1.0.0 resolution: "is-deflate@npm:1.0.0" - checksum: c2f9f2d3db79ac50c5586697d1e69a55282a2b0cc5e437b3c470dd47f24e40b6216dcd7e024511e21381607bf57afa019343e3bd0e08a119032818b596004262 + checksum: 10c0/35f7ffcbef3549dd8a4d8df5dc09b4f4656a0fc88326e8b5201cda54114a9c2d8efb689d87c16f3f35c95bd71dcf13dc790d62b7504745b42c53ab4b40238f5a languageName: node linkType: hard @@ -8967,14 +9075,14 @@ __metadata: resolution: "is-docker@npm:2.2.1" bin: is-docker: cli.js - checksum: 3fef7ddbf0be25958e8991ad941901bf5922ab2753c46980b60b05c1bf9c9c2402d35e6dc32e4380b980ef5e1970a5d9d5e5aa2e02d77727c3b6b5e918474c56 + checksum: 10c0/e828365958d155f90c409cdbe958f64051d99e8aedc2c8c4cd7c89dcf35329daed42f7b99346f7828df013e27deb8f721cf9408ba878c76eb9e8290235fbcdcc languageName: node linkType: hard "is-extglob@npm:^2.1.1": version: 2.1.1 resolution: "is-extglob@npm:2.1.1" - checksum: df033653d06d0eb567461e58a7a8c9f940bd8c22274b94bf7671ab36df5719791aae15eef6d83bbb5e23283967f2f984b8914559d4449efda578c775c4be6f85 + checksum: 10c0/5487da35691fbc339700bbb2730430b07777a3c21b9ebaecb3072512dfd7b4ba78ac2381a87e8d78d20ea08affb3f1971b4af629173a6bf435ff8a4c47747912 languageName: node linkType: hard @@ -8983,21 +9091,37 @@ __metadata: resolution: "is-finalizationregistry@npm:1.0.2" dependencies: call-bind: "npm:^1.0.2" - checksum: 1b8e9e1bf2075e862315ef9d38ce6d39c43ca9d81d46f73b34473506992f4b0fbaadb47ec9b420a5e76afe3f564d9f1f0d9b552ef272cc2395e0f21d743c9c29 + checksum: 10c0/81caecc984d27b1a35c68741156fc651fb1fa5e3e6710d21410abc527eb226d400c0943a167922b2e920f6b3e58b0dede9aa795882b038b85f50b3a4b877db86 languageName: node linkType: hard "is-fullwidth-code-point@npm:^3.0.0": version: 3.0.0 resolution: "is-fullwidth-code-point@npm:3.0.0" - checksum: 44a30c29457c7fb8f00297bce733f0a64cd22eca270f83e58c105e0d015e45c019491a4ab2faef91ab51d4738c670daff901c799f6a700e27f7314029e99e348 + checksum: 
10c0/bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^4.0.0": + version: 4.0.0 + resolution: "is-fullwidth-code-point@npm:4.0.0" + checksum: 10c0/df2a717e813567db0f659c306d61f2f804d480752526886954a2a3e2246c7745fd07a52b5fecf2b68caf0a6c79dcdace6166fdf29cc76ed9975cc334f0a018b8 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^5.0.0": + version: 5.0.0 + resolution: "is-fullwidth-code-point@npm:5.0.0" + dependencies: + get-east-asian-width: "npm:^1.0.0" + checksum: 10c0/cd591b27d43d76b05fa65ed03eddce57a16e1eca0b7797ff7255de97019bcaf0219acfc0c4f7af13319e13541f2a53c0ace476f442b13267b9a6a7568f2b65c8 languageName: node linkType: hard "is-generator-fn@npm:^2.0.0": version: 2.1.0 resolution: "is-generator-fn@npm:2.1.0" - checksum: a6ad5492cf9d1746f73b6744e0c43c0020510b59d56ddcb78a91cbc173f09b5e6beff53d75c9c5a29feb618bfef2bf458e025ecf3a57ad2268e2fb2569f56215 + checksum: 10c0/2957cab387997a466cd0bf5c1b6047bd21ecb32bdcfd8996b15747aa01002c1c88731802f1b3d34ac99f4f6874b626418bd118658cf39380fe5fff32a3af9c4d languageName: node linkType: hard @@ -9006,7 +9130,7 @@ __metadata: resolution: "is-generator-function@npm:1.0.10" dependencies: has-tostringtag: "npm:^1.0.0" - checksum: 499a3ce6361064c3bd27fbff5c8000212d48506ebe1977842bbd7b3e708832d0deb1f4cc69186ece3640770e8c4f1287b24d99588a0b8058b2dbdd344bc1f47f + checksum: 10c0/df03514df01a6098945b5a0cfa1abff715807c8e72f57c49a0686ad54b3b74d394e2d8714e6f709a71eb00c9630d48e73ca1796c1ccc84ac95092c1fecc0d98b languageName: node linkType: hard @@ -9015,49 +9139,49 @@ __metadata: resolution: "is-glob@npm:4.0.3" dependencies: is-extglob: "npm:^2.1.1" - checksum: 3ed74f2b0cdf4f401f38edb0442ddfde3092d79d7d35c9919c86641efdbcbb32e45aa3c0f70ce5eecc946896cd5a0f26e4188b9f2b881876f7cb6c505b82da11 + checksum: 
10c0/17fb4014e22be3bbecea9b2e3a76e9e34ff645466be702f1693e8f1ee1adac84710d0be0bd9f967d6354036fd51ab7c2741d954d6e91dae6bb69714de92c197a languageName: node linkType: hard "is-gzip@npm:^1.0.0": version: 1.0.0 resolution: "is-gzip@npm:1.0.0" - checksum: 0d28931c1f445fa29c900cf9f48e06e9d1d477a3bf7bd7332e7ce68f1333ccd8cb381de2f0f62a9a262d9c0912608a9a71b4a40e788e201b3dbd67072bb20d86 + checksum: 10c0/cbc1db080c636a6fb0f7346e3076f8276a29a9d8b52ae67c1971a8131c43f308e98ed227d1a6f49970e6c6ebabee0568e60aed7a3579dd4e1817cddf2faaf9b7 languageName: node linkType: hard "is-hexadecimal@npm:^1.0.0": version: 1.0.4 resolution: "is-hexadecimal@npm:1.0.4" - checksum: a452e047587b6069332d83130f54d30da4faf2f2ebaa2ce6d073c27b5703d030d58ed9e0b729c8e4e5b52c6f1dab26781bb77b7bc6c7805f14f320e328ff8cd5 + checksum: 10c0/ec4c64e5624c0f240922324bc697e166554f09d3ddc7633fc526084502626445d0a871fbd8cae52a9844e83bd0bb414193cc5a66806d7b2867907003fc70c5ea languageName: node linkType: hard "is-hexadecimal@npm:^2.0.0": version: 2.0.1 resolution: "is-hexadecimal@npm:2.0.1" - checksum: 66a2ea85994c622858f063f23eda506db29d92b52580709eb6f4c19550552d4dcf3fb81952e52f7cf972097237959e00adc7bb8c9400cd12886e15bf06145321 + checksum: 10c0/3eb60fe2f1e2bbc760b927dcad4d51eaa0c60138cf7fc671803f66353ad90c301605b502c7ea4c6bb0548e1c7e79dfd37b73b632652e3b76030bba603a7e9626 languageName: node linkType: hard "is-interactive@npm:^1.0.0": version: 1.0.0 resolution: "is-interactive@npm:1.0.0" - checksum: 824808776e2d468b2916cdd6c16acacebce060d844c35ca6d82267da692e92c3a16fdba624c50b54a63f38bdc4016055b6f443ce57d7147240de4f8cdabaf6f9 + checksum: 10c0/dd47904dbf286cd20aa58c5192161be1a67138485b9836d5a70433b21a45442e9611b8498b8ab1f839fc962c7620667a50535fdfb4a6bc7989b8858645c06b4d languageName: node linkType: hard "is-lambda@npm:^1.0.1": version: 1.0.1 resolution: "is-lambda@npm:1.0.1" - checksum: 93a32f01940220532e5948538699ad610d5924ac86093fcee83022252b363eb0cc99ba53ab084a04e4fb62bf7b5731f55496257a4c38adf87af9c4d352c71c35 + checksum: 
10c0/85fee098ae62ba6f1e24cf22678805473c7afd0fb3978a3aa260e354cb7bcb3a5806cf0a98403188465efedec41ab4348e8e4e79305d409601323855b3839d4d languageName: node linkType: hard "is-map@npm:^2.0.1, is-map@npm:^2.0.2": version: 2.0.2 resolution: "is-map@npm:2.0.2" - checksum: 60ba910f835f2eacb1fdf5b5a6c60fe1c702d012a7673e6546992bcc0c873f62ada6e13d327f9e48f1720d49c152d6cdecae1fa47a261ef3d247c3ce6f0e1d39 + checksum: 10c0/119ff9137a37fd131a72fab3f4ab8c9d6a24b0a1ee26b4eff14dc625900d8675a97785eea5f4174265e2006ed076cc24e89f6e57ebd080a48338d914ec9168a5 languageName: node linkType: hard @@ -9067,14 +9191,14 @@ __metadata: dependencies: call-bind: "npm:^1.0.0" define-properties: "npm:^1.1.3" - checksum: 1f784d3472c09bc2e47acba7ffd4f6c93b0394479aa613311dc1d70f1bfa72eb0846c81350967722c959ba65811bae222204d6c65856fdce68f31986140c7b0e + checksum: 10c0/8bfb286f85763f9c2e28ea32e9127702fe980ffd15fa5d63ade3be7786559e6e21355d3625dd364c769c033c5aedf0a2ed3d4025d336abf1b9241e3d9eddc5b0 languageName: node linkType: hard "is-negative-zero@npm:^2.0.2": version: 2.0.3 resolution: "is-negative-zero@npm:2.0.3" - checksum: 8fe5cffd8d4fb2ec7b49d657e1691889778d037494c6f40f4d1a524cadd658b4b53ad7b6b73a59bcb4b143ae9a3d15829af864b2c0f9d65ac1e678c4c80f17e5 + checksum: 10c0/bcdcf6b8b9714063ffcfa9929c575ac69bfdabb8f4574ff557dfc086df2836cf07e3906f5bbc4f2a5c12f8f3ba56af640c843cdfc74da8caed86c7c7d66fd08e languageName: node linkType: hard @@ -9083,35 +9207,35 @@ __metadata: resolution: "is-number-object@npm:1.0.7" dependencies: has-tostringtag: "npm:^1.0.0" - checksum: 8700dcf7f602e0a9625830541345b8615d04953655acbf5c6d379c58eb1af1465e71227e95d501343346e1d49b6f2d53cbc166b1fc686a7ec19151272df582f9 + checksum: 10c0/aad266da1e530f1804a2b7bd2e874b4869f71c98590b3964f9d06cc9869b18f8d1f4778f838ecd2a11011bce20aeecb53cb269ba916209b79c24580416b74b1b languageName: node linkType: hard "is-number@npm:^7.0.0": version: 7.0.0 resolution: "is-number@npm:7.0.0" - checksum: 
6a6c3383f68afa1e05b286af866017c78f1226d43ac8cb064e115ff9ed85eb33f5c4f7216c96a71e4dfea289ef52c5da3aef5bbfade8ffe47a0465d70c0c8e86 + checksum: 10c0/b4686d0d3053146095ccd45346461bc8e53b80aeb7671cc52a4de02dbbf7dc0d1d2a986e2fe4ae206984b4d34ef37e8b795ebc4f4295c978373e6575e295d811 languageName: node linkType: hard "is-path-cwd@npm:^2.2.0": version: 2.2.0 resolution: "is-path-cwd@npm:2.2.0" - checksum: 46a840921bb8cc0dc7b5b423a14220e7db338072a4495743a8230533ce78812dc152548c86f4b828411fe98c5451959f07cf841c6a19f611e46600bd699e8048 + checksum: 10c0/afce71533a427a759cd0329301c18950333d7589533c2c90205bd3fdcf7b91eb92d1940493190567a433134d2128ec9325de2fd281e05be1920fbee9edd22e0a languageName: node linkType: hard "is-path-inside@npm:^3.0.2, is-path-inside@npm:^3.0.3": version: 3.0.3 resolution: "is-path-inside@npm:3.0.3" - checksum: abd50f06186a052b349c15e55b182326f1936c89a78bf6c8f2b707412517c097ce04bc49a0ca221787bc44e1049f51f09a2ffb63d22899051988d3a618ba13e9 + checksum: 10c0/cf7d4ac35fb96bab6a1d2c3598fe5ebb29aafb52c0aaa482b5a3ed9d8ba3edc11631e3ec2637660c44b3ce0e61a08d54946e8af30dec0b60a7c27296c68ffd05 languageName: node linkType: hard "is-plain-object@npm:5.0.0": version: 5.0.0 resolution: "is-plain-object@npm:5.0.0" - checksum: e32d27061eef62c0847d303125440a38660517e586f2f3db7c9d179ae5b6674ab0f469d519b2e25c147a1a3bc87156d0d5f4d8821e0ce4a9ee7fe1fcf11ce45c + checksum: 10c0/893e42bad832aae3511c71fd61c0bf61aa3a6d853061c62a307261842727d0d25f761ce9379f7ba7226d6179db2a3157efa918e7fe26360f3bf0842d9f28942c languageName: node linkType: hard @@ -9120,14 +9244,14 @@ __metadata: resolution: "is-plain-object@npm:2.0.4" dependencies: isobject: "npm:^3.0.1" - checksum: 2a401140cfd86cabe25214956ae2cfee6fbd8186809555cd0e84574f88de7b17abacb2e477a6a658fa54c6083ecbda1e6ae404c7720244cd198903848fca70ca + checksum: 10c0/f050fdd5203d9c81e8c4df1b3ff461c4bc64e8b5ca383bcdde46131361d0a678e80bcf00b5257646f6c636197629644d53bd8e2375aea633de09a82d57e942f4 languageName: node linkType: hard 
"is-potential-custom-element-name@npm:^1.0.1": version: 1.0.1 resolution: "is-potential-custom-element-name@npm:1.0.1" - checksum: ced7bbbb6433a5b684af581872afe0e1767e2d1146b2207ca0068a648fb5cab9d898495d1ac0583524faaf24ca98176a7d9876363097c2d14fee6dd324f3a1ab + checksum: 10c0/b73e2f22bc863b0939941d369486d308b43d7aef1f9439705e3582bfccaa4516406865e32c968a35f97a99396dac84e2624e67b0a16b0a15086a785e16ce7db9 languageName: node linkType: hard @@ -9137,14 +9261,14 @@ __metadata: dependencies: call-bind: "npm:^1.0.2" has-tostringtag: "npm:^1.0.0" - checksum: 36d9174d16d520b489a5e9001d7d8d8624103b387be300c50f860d9414556d0485d74a612fdafc6ebbd5c89213d947dcc6b6bff6b2312093f71ea03cbb19e564 + checksum: 10c0/bb72aae604a69eafd4a82a93002058c416ace8cde95873589a97fc5dac96a6c6c78a9977d487b7b95426a8f5073969124dd228f043f9f604f041f32fcc465fc1 languageName: node linkType: hard "is-set@npm:^2.0.1, is-set@npm:^2.0.2": version: 2.0.2 resolution: "is-set@npm:2.0.2" - checksum: d89e82acdc7760993474f529e043f9c4a1d63ed4774d21cc2e331d0e401e5c91c27743cd7c889137028f6a742234759a4bd602368fbdbf0b0321994aefd5603f + checksum: 10c0/5f8bd1880df8c0004ce694e315e6e1e47a3452014be792880bb274a3b2cdb952fdb60789636ca6e084c7947ca8b7ae03ccaf54c93a7fcfed228af810559e5432 languageName: node linkType: hard @@ -9153,21 +9277,21 @@ __metadata: resolution: "is-shared-array-buffer@npm:1.0.3" dependencies: call-bind: "npm:^1.0.7" - checksum: bc5402900dc62b96ebb2548bf5b0a0bcfacc2db122236fe3ab3b3e3c884293a0d5eb777e73f059bcbf8dc8563bb65eae972fee0fb97e38a9ae27c8678f62bcfe + checksum: 10c0/adc11ab0acbc934a7b9e5e9d6c588d4ec6682f6fea8cda5180721704fa32927582ede5b123349e32517fdadd07958973d24716c80e7ab198970c47acc09e59c7 languageName: node linkType: hard "is-stream@npm:^2.0.0": version: 2.0.1 resolution: "is-stream@npm:2.0.1" - checksum: b8e05ccdf96ac330ea83c12450304d4a591f9958c11fd17bed240af8d5ffe08aedafa4c0f4cfccd4d28dc9d4d129daca1023633d5c11601a6cbc77521f6fae66 + checksum: 
10c0/7c284241313fc6efc329b8d7f08e16c0efeb6baab1b4cd0ba579eb78e5af1aa5da11e68559896a2067cd6c526bd29241dda4eb1225e627d5aa1a89a76d4635a5 languageName: node linkType: hard "is-stream@npm:^3.0.0": version: 3.0.0 resolution: "is-stream@npm:3.0.0" - checksum: 172093fe99119ffd07611ab6d1bcccfe8bc4aa80d864b15f43e63e54b7abc71e779acd69afdb854c4e2a67fdc16ae710e370eda40088d1cfc956a50ed82d8f16 + checksum: 10c0/eb2f7127af02ee9aa2a0237b730e47ac2de0d4e76a4a905a50a11557f2339df5765eaea4ceb8029f1efa978586abe776908720bfcb1900c20c6ec5145f6f29d8 languageName: node linkType: hard @@ -9176,7 +9300,7 @@ __metadata: resolution: "is-string@npm:1.0.7" dependencies: has-tostringtag: "npm:^1.0.0" - checksum: 2bc292fe927493fb6dfc3338c099c3efdc41f635727c6ebccf704aeb2a27bca7acb9ce6fd34d103db78692b10b22111a8891de26e12bfa1c5e11e263c99d1fef + checksum: 10c0/905f805cbc6eedfa678aaa103ab7f626aac9ebbdc8737abb5243acaa61d9820f8edc5819106b8fcd1839e33db21de9f0116ae20de380c8382d16dc2a601921f6 languageName: node linkType: hard @@ -9185,7 +9309,7 @@ __metadata: resolution: "is-symbol@npm:1.0.4" dependencies: has-symbols: "npm:^1.0.2" - checksum: a47dd899a84322528b71318a89db25c7ecdec73197182dad291df15ffea501e17e3c92c8de0bfb50e63402747399981a687b31c519971b1fa1a27413612be929 + checksum: 10c0/9381dd015f7c8906154dbcbf93fad769de16b4b961edc94f88d26eb8c555935caa23af88bda0c93a18e65560f6d7cca0fd5a3f8a8e1df6f1abbb9bead4502ef7 languageName: node linkType: hard @@ -9194,21 +9318,21 @@ __metadata: resolution: "is-typed-array@npm:1.1.13" dependencies: which-typed-array: "npm:^1.1.14" - checksum: f850ba08286358b9a11aee6d93d371a45e3c59b5953549ee1c1a9a55ba5c1dd1bd9952488ae194ad8f32a9cf5e79c8fa5f0cc4d78c00720aa0bbcf238b38062d + checksum: 10c0/fa5cb97d4a80e52c2cc8ed3778e39f175a1a2ae4ddf3adae3187d69586a1fd57cfa0b095db31f66aa90331e9e3da79184cea9c6abdcd1abc722dc3c3edd51cca languageName: node linkType: hard "is-unicode-supported@npm:^0.1.0": version: 0.1.0 resolution: "is-unicode-supported@npm:0.1.0" - checksum: 
a2aab86ee7712f5c2f999180daaba5f361bdad1efadc9610ff5b8ab5495b86e4f627839d085c6530363c6d6d4ecbde340fb8e54bdb83da4ba8e0865ed5513c52 + checksum: 10c0/00cbe3455c3756be68d2542c416cab888aebd5012781d6819749fefb15162ff23e38501fe681b3d751c73e8ff561ac09a5293eba6f58fdf0178462ce6dcb3453 languageName: node linkType: hard "is-weakmap@npm:^2.0.1": version: 2.0.1 resolution: "is-weakmap@npm:2.0.1" - checksum: 289fa4e8ba1bdda40ca78481266f6925b7c46a85599e6a41a77010bf91e5a24dfb660db96863bbf655ecdbda0ab517204d6a4e0c151dbec9d022c556321f3776 + checksum: 10c0/9c9fec9efa7bf5030a4a927f33fff2a6976b93646259f92b517d3646c073cc5b98283a162ce75c412b060a46de07032444b530f0a4c9b6e012ef8f1741c3a987 languageName: node linkType: hard @@ -9217,7 +9341,7 @@ __metadata: resolution: "is-weakref@npm:1.0.2" dependencies: call-bind: "npm:^1.0.2" - checksum: 0023fd0e4bdf9c338438ffbe1eed7ebbbff7e7e18fb7cdc227caaf9d4bd024a2dcdf6a8c9f40c92192022eac8391243bb9e66cccebecbf6fe1d8a366108f8513 + checksum: 10c0/1545c5d172cb690c392f2136c23eec07d8d78a7f57d0e41f10078aa4f5daf5d7f57b6513a67514ab4f073275ad00c9822fc8935e00229d0a2089e1c02685d4b1 languageName: node linkType: hard @@ -9227,7 +9351,7 @@ __metadata: dependencies: call-bind: "npm:^1.0.2" get-intrinsic: "npm:^1.1.1" - checksum: 8f2ddb9639716fd7936784e175ea1183c5c4c05274c34f34f6a53175313cb1c9c35a8b795623306995e2f7cc8f25aa46302f15a2113e51c5052d447be427195c + checksum: 10c0/ef5136bd446ae4603229b897f73efd0720c6ab3ec6cc05c8d5c4b51aa9f95164713c4cad0a22ff1fedf04865ff86cae4648bc1d5eead4b6388e1150525af1cc1 languageName: node linkType: hard @@ -9236,49 +9360,49 @@ __metadata: resolution: "is-wsl@npm:2.2.0" dependencies: is-docker: "npm:^2.0.0" - checksum: 20849846ae414997d290b75e16868e5261e86ff5047f104027026fd61d8b5a9b0b3ade16239f35e1a067b3c7cc02f70183cb661010ed16f4b6c7c93dad1b19d8 + checksum: 10c0/a6fa2d370d21be487c0165c7a440d567274fbba1a817f2f0bfa41cc5e3af25041d84267baa22df66696956038a43973e72fca117918c91431920bdef490fa25e languageName: node linkType: hard 
"isarray@npm:^2.0.5": version: 2.0.5 resolution: "isarray@npm:2.0.5" - checksum: 1d8bc7911e13bb9f105b1b3e0b396c787a9e63046af0b8fe0ab1414488ab06b2b099b87a2d8a9e31d21c9a6fad773c7fc8b257c4880f2d957274479d28ca3414 + checksum: 10c0/4199f14a7a13da2177c66c31080008b7124331956f47bca57dd0b6ea9f11687aa25e565a2c7a2b519bc86988d10398e3049a1f5df13c9f6b7664154690ae79fd languageName: node linkType: hard "isarray@npm:~1.0.0": version: 1.0.0 resolution: "isarray@npm:1.0.0" - checksum: f032df8e02dce8ec565cf2eb605ea939bdccea528dbcf565cdf92bfa2da9110461159d86a537388ef1acef8815a330642d7885b29010e8f7eac967c9993b65ab + checksum: 10c0/18b5be6669be53425f0b84098732670ed4e727e3af33bc7f948aac01782110eb9a18b3b329c5323bcdd3acdaae547ee077d3951317e7f133bff7105264b3003d languageName: node linkType: hard "isexe@npm:^2.0.0": version: 2.0.0 resolution: "isexe@npm:2.0.0" - checksum: 7c9f715c03aff08f35e98b1fadae1b9267b38f0615d501824f9743f3aab99ef10e303ce7db3f186763a0b70a19de5791ebfc854ff884d5a8c4d92211f642ec92 + checksum: 10c0/228cfa503fadc2c31596ab06ed6aa82c9976eec2bfd83397e7eaf06d0ccf42cd1dfd6743bf9aeb01aebd4156d009994c5f76ea898d2832c1fe342da923ca457d languageName: node linkType: hard "isexe@npm:^3.1.1": version: 3.1.1 resolution: "isexe@npm:3.1.1" - checksum: 7fe1931ee4e88eb5aa524cd3ceb8c882537bc3a81b02e438b240e47012eef49c86904d0f0e593ea7c3a9996d18d0f1f3be8d3eaa92333977b0c3a9d353d5563e + checksum: 10c0/9ec257654093443eb0a528a9c8cbba9c0ca7616ccb40abd6dde7202734d96bb86e4ac0d764f0f8cd965856aacbff2f4ce23e730dc19dfb41e3b0d865ca6fdcc7 languageName: node linkType: hard "isobject@npm:^3.0.1": version: 3.0.1 resolution: "isobject@npm:3.0.1" - checksum: db85c4c970ce30693676487cca0e61da2ca34e8d4967c2e1309143ff910c207133a969f9e4ddb2dc6aba670aabce4e0e307146c310350b298e74a31f7d464703 + checksum: 10c0/03344f5064a82f099a0cd1a8a407f4c0d20b7b8485e8e816c39f249e9416b06c322e8dec5b842b6bb8a06de0af9cb48e7bc1b5352f0fadc2f0abac033db3d4db languageName: node linkType: hard "istanbul-lib-coverage@npm:^3.0.0, 
istanbul-lib-coverage@npm:^3.2.0": version: 3.2.2 resolution: "istanbul-lib-coverage@npm:3.2.2" - checksum: 40bbdd1e937dfd8c830fa286d0f665e81b7a78bdabcd4565f6d5667c99828bda3db7fb7ac6b96a3e2e8a2461ddbc5452d9f8bc7d00cb00075fa6a3e99f5b6a81 + checksum: 10c0/6c7ff2106769e5f592ded1fb418f9f73b4411fd5a084387a5410538332b6567cd1763ff6b6cadca9b9eb2c443cce2f7ea7d7f1b8d315f9ce58539793b1e0922b languageName: node linkType: hard @@ -9291,7 +9415,7 @@ __metadata: "@istanbuljs/schema": "npm:^0.1.2" istanbul-lib-coverage: "npm:^3.2.0" semver: "npm:^6.3.0" - checksum: bbc4496c2f304d799f8ec22202ab38c010ac265c441947f075c0f7d46bd440b45c00e46017cf9053453d42182d768b1d6ed0e70a142c95ab00df9843aa5ab80e + checksum: 10c0/8a1bdf3e377dcc0d33ec32fe2b6ecacdb1e4358fd0eb923d4326bb11c67622c0ceb99600a680f3dad5d29c66fc1991306081e339b4d43d0b8a2ab2e1d910a6ee languageName: node linkType: hard @@ -9304,7 +9428,7 @@ __metadata: "@istanbuljs/schema": "npm:^0.1.3" istanbul-lib-coverage: "npm:^3.2.0" semver: "npm:^7.5.4" - checksum: 3aee19be199350182827679a137e1df142a306e9d7e20bb5badfd92ecc9023a7d366bc68e7c66e36983654a02a67401d75d8debf29fc6d4b83670fde69a594fc + checksum: 10c0/405c6ac037bf8c7ee7495980b0cd5544b2c53078c10534d0c9ceeb92a9ea7dcf8510f58ccfce31336458a8fa6ccef27b570bbb602abaa8c1650f5496a807477c languageName: node linkType: hard @@ -9315,7 +9439,7 @@ __metadata: istanbul-lib-coverage: "npm:^3.0.0" make-dir: "npm:^4.0.0" supports-color: "npm:^7.1.0" - checksum: 86a83421ca1cf2109a9f6d193c06c31ef04a45e72a74579b11060b1e7bb9b6337a4e6f04abfb8857e2d569c271273c65e855ee429376a0d7c91ad91db42accd1 + checksum: 10c0/84323afb14392de8b6a5714bd7e9af845cfbd56cfe71ed276cda2f5f1201aea673c7111901227ee33e68e4364e288d73861eb2ed48f6679d1e69a43b6d9b3ba7 languageName: node linkType: hard @@ -9326,7 +9450,7 @@ __metadata: debug: "npm:^4.1.1" istanbul-lib-coverage: "npm:^3.0.0" source-map: "npm:^0.6.1" - checksum: 
5526983462799aced011d776af166e350191b816821ea7bcf71cab3e5272657b062c47dc30697a22a43656e3ced78893a42de677f9ccf276a28c913190953b82 + checksum: 10c0/19e4cc405016f2c906dff271a76715b3e881fa9faeb3f09a86cb99b8512b3a5ed19cadfe0b54c17ca0e54c1142c9c6de9330d65506e35873994e06634eebeb66 languageName: node linkType: hard @@ -9336,7 +9460,7 @@ __metadata: dependencies: html-escaper: "npm:^2.0.0" istanbul-lib-report: "npm:^3.0.0" - checksum: f1faaa4684efaf57d64087776018d7426312a59aa6eeb4e0e3a777347d23cd286ad18f427e98f0e3dee666103d7404c9d7abc5f240406a912fa16bd6695437fa + checksum: 10c0/a379fadf9cf8dc5dfe25568115721d4a7eb82fbd50b005a6672aff9c6989b20cc9312d7865814e0859cd8df58cbf664482e1d3604be0afde1f7fc3ccc1394a51 languageName: node linkType: hard @@ -9349,7 +9473,7 @@ __metadata: has-symbols: "npm:^1.0.3" reflect.getprototypeof: "npm:^1.0.4" set-function-name: "npm:^2.0.1" - checksum: b5013967ad8f28c9ca1be8e159eb10f591b8e46deae87476fe39d668c04374fe9158c815e8b6d2f45885b0a3fd842a8ba13f497ec762b3a0eff49bec278670b1 + checksum: 10c0/a32151326095e916f306990d909f6bbf23e3221999a18ba686419535dcd1749b10ded505e89334b77dc4c7a58a8508978f0eb16c2c8573e6d412eb7eb894ea79 languageName: node linkType: hard @@ -9362,7 +9486,7 @@ __metadata: dependenciesMeta: "@pkgjs/parseargs": optional: true - checksum: 6e6490d676af8c94a7b5b29b8fd5629f21346911ebe2e32931c2a54210134408171c24cee1a109df2ec19894ad04a429402a8438cbf5cc2794585d35428ace76 + checksum: 10c0/f01d8f972d894cd7638bc338e9ef5ddb86f7b208ce177a36d718eac96ec86638a6efa17d0221b10073e64b45edc2ce15340db9380b1f5d5c5d000cbc517dc111 languageName: node linkType: hard @@ -9376,7 +9500,7 @@ __metadata: minimatch: "npm:^3.1.2" bin: jake: bin/cli.js - checksum: ad1cfe398836df4e6962954e5095597c21c5af1ea5a4182f6adf0869df8aca467a2eeca7869bf44f47120f4dd4ea52589d16050d295c87a5906c0d744775acc3 + checksum: 10c0/89326d01a8bc110d02d973729a66394c79a34b34461116f5c530a2a2dbc30265683fe6737928f75df9178e9d369ff1442f5753fb983d525e740eefdadc56a103 languageName: node linkType: hard 
@@ -9387,7 +9511,7 @@ __metadata: execa: "npm:^5.0.0" jest-util: "npm:^29.7.0" p-limit: "npm:^3.1.0" - checksum: 3d93742e56b1a73a145d55b66e96711fbf87ef89b96c2fab7cfdfba8ec06612591a982111ca2b712bb853dbc16831ec8b43585a2a96b83862d6767de59cbf83d + checksum: 10c0/e071384d9e2f6bb462231ac53f29bff86f0e12394c1b49ccafbad225ce2ab7da226279a8a94f421949920bef9be7ef574fd86aee22e8adfa149be73554ab828b languageName: node linkType: hard @@ -9415,7 +9539,7 @@ __metadata: pure-rand: "npm:^6.0.0" slash: "npm:^3.0.0" stack-utils: "npm:^2.0.3" - checksum: 716a8e3f40572fd0213bcfc1da90274bf30d856e5133af58089a6ce45089b63f4d679bd44e6be9d320e8390483ebc3ae9921981993986d21639d9019b523123d + checksum: 10c0/8d15344cf7a9f14e926f0deed64ed190c7a4fa1ed1acfcd81e4cc094d3cc5bf7902ebb7b874edc98ada4185688f90c91e1747e0dfd7ac12463b097968ae74b5e languageName: node linkType: hard @@ -9441,7 +9565,7 @@ __metadata: optional: true bin: jest: bin/jest.js - checksum: 6cc62b34d002c034203065a31e5e9a19e7c76d9e8ef447a6f70f759c0714cb212c6245f75e270ba458620f9c7b26063cd8cf6cd1f7e3afd659a7cc08add17307 + checksum: 10c0/a658fd55050d4075d65c1066364595962ead7661711495cfa1dfeecf3d6d0a8ffec532f3dbd8afbb3e172dd5fd2fb2e813c5e10256e7cf2fea766314942fb43a languageName: node linkType: hard @@ -9479,7 +9603,7 @@ __metadata: optional: true ts-node: optional: true - checksum: 6bdf570e9592e7d7dd5124fc0e21f5fe92bd15033513632431b211797e3ab57eaa312f83cc6481b3094b72324e369e876f163579d60016677c117ec4853cf02b + checksum: 10c0/bab23c2eda1fff06e0d104b00d6adfb1d1aabb7128441899c9bff2247bd26710b050a5364281ce8d52b46b499153bf7e3ee88b19831a8f3451f1477a0246a0f1 languageName: node linkType: hard @@ -9491,7 +9615,7 @@ __metadata: diff-sequences: "npm:^29.6.3" jest-get-type: "npm:^29.6.3" pretty-format: "npm:^29.7.0" - checksum: 6f3a7eb9cd9de5ea9e5aa94aed535631fa6f80221832952839b3cb59dd419b91c20b73887deb0b62230d06d02d6b6cf34ebb810b88d904bb4fe1e2e4f0905c98 + checksum: 
10c0/89a4a7f182590f56f526443dde69acefb1f2f0c9e59253c61d319569856c4931eae66b8a3790c443f529267a0ddba5ba80431c585deed81827032b2b2a1fc999 languageName: node linkType: hard @@ -9500,7 +9624,7 @@ __metadata: resolution: "jest-docblock@npm:29.7.0" dependencies: detect-newline: "npm:^3.0.0" - checksum: 8d48818055bc96c9e4ec2e217a5a375623c0d0bfae8d22c26e011074940c202aa2534a3362294c81d981046885c05d304376afba9f2874143025981148f3e96d + checksum: 10c0/d932a8272345cf6b6142bb70a2bb63e0856cc0093f082821577ea5bdf4643916a98744dfc992189d2b1417c38a11fa42466f6111526bc1fb81366f56410f3be9 languageName: node linkType: hard @@ -9513,7 +9637,7 @@ __metadata: jest-get-type: "npm:^29.6.3" jest-util: "npm:^29.7.0" pretty-format: "npm:^29.7.0" - checksum: bd1a077654bdaa013b590deb5f7e7ade68f2e3289180a8c8f53bc8a49f3b40740c0ec2d3a3c1aee906f682775be2bebbac37491d80b634d15276b0aa0f2e3fda + checksum: 10c0/f7f9a90ebee80cc688e825feceb2613627826ac41ea76a366fa58e669c3b2403d364c7c0a74d862d469b103c843154f8456d3b1c02b487509a12afa8b59edbb4 languageName: node linkType: hard @@ -9534,7 +9658,7 @@ __metadata: peerDependenciesMeta: canvas: optional: true - checksum: 23bbfc9bca914baef4b654f7983175a4d49b0f515a5094ebcb8f819f28ec186f53c0ba06af1855eac04bab1457f4ea79dae05f70052cf899863e8096daa6e0f5 + checksum: 10c0/139b94e2c8ec1bb5a46ce17df5211da65ce867354b3fd4e00fa6a0d1da95902df4cf7881273fc6ea937e5c325d39d6773f0d41b6c469363334de9d489d2c321f languageName: node linkType: hard @@ -9548,14 +9672,14 @@ __metadata: "@types/node": "npm:*" jest-mock: "npm:^29.7.0" jest-util: "npm:^29.7.0" - checksum: 9cf7045adf2307cc93aed2f8488942e39388bff47ec1df149a997c6f714bfc66b2056768973770d3f8b1bf47396c19aa564877eb10ec978b952c6018ed1bd637 + checksum: 10c0/61f04fec077f8b1b5c1a633e3612fc0c9aa79a0ab7b05600683428f1e01a4d35346c474bde6f439f9fcc1a4aa9a2861ff852d079a43ab64b02105d1004b2592b languageName: node linkType: hard "jest-get-type@npm:^29.6.3": version: 29.6.3 resolution: "jest-get-type@npm:29.6.3" - checksum: 
88ac9102d4679d768accae29f1e75f592b760b44277df288ad76ce5bf038c3f5ce3719dea8aa0f035dac30e9eb034b848ce716b9183ad7cc222d029f03e92205 + checksum: 10c0/552e7a97a983d3c2d4e412a44eb7de0430ff773dd99f7500962c268d6dfbfa431d7d08f919c9d960530e5f7f78eb47f267ad9b318265e5092b3ff9ede0db7c2b languageName: node linkType: hard @@ -9578,7 +9702,7 @@ __metadata: dependenciesMeta: fsevents: optional: true - checksum: 8531b42003581cb18a69a2774e68c456fb5a5c3280b1b9b77475af9e346b6a457250f9d756bfeeae2fe6cbc9ef28434c205edab9390ee970a919baddfa08bb85 + checksum: 10c0/2683a8f29793c75a4728787662972fedd9267704c8f7ef9d84f2beed9a977f1cf5e998c07b6f36ba5603f53cb010c911fe8cd0ac9886e073fe28ca66beefd30c languageName: node linkType: hard @@ -9588,7 +9712,7 @@ __metadata: dependencies: jest-get-type: "npm:^29.6.3" pretty-format: "npm:^29.7.0" - checksum: e3950e3ddd71e1d0c22924c51a300a1c2db6cf69ec1e51f95ccf424bcc070f78664813bef7aed4b16b96dfbdeea53fe358f8aeaaea84346ae15c3735758f1605 + checksum: 10c0/71bb9f77fc489acb842a5c7be030f2b9acb18574dc9fb98b3100fc57d422b1abc55f08040884bd6e6dbf455047a62f7eaff12aa4058f7cbdc11558718ca6a395 languageName: node linkType: hard @@ -9600,7 +9724,7 @@ __metadata: jest-diff: "npm:^29.7.0" jest-get-type: "npm:^29.6.3" pretty-format: "npm:^29.7.0" - checksum: 981904a494299cf1e3baed352f8a3bd8b50a8c13a662c509b6a53c31461f94ea3bfeffa9d5efcfeb248e384e318c87de7e3baa6af0f79674e987482aa189af40 + checksum: 10c0/0d0e70b28fa5c7d4dce701dc1f46ae0922102aadc24ed45d594dd9b7ae0a8a6ef8b216718d1ab79e451291217e05d4d49a82666e1a3cc2b428b75cd9c933244e languageName: node linkType: hard @@ -9617,7 +9741,7 @@ __metadata: pretty-format: "npm:^29.7.0" slash: "npm:^3.0.0" stack-utils: "npm:^2.0.3" - checksum: 31d53c6ed22095d86bab9d14c0fa70c4a92c749ea6ceece82cf30c22c9c0e26407acdfbdb0231435dc85a98d6d65ca0d9cbcd25cd1abb377fe945e843fb770b9 + checksum: 10c0/850ae35477f59f3e6f27efac5215f706296e2104af39232bb14e5403e067992afb5c015e87a9243ec4d9df38525ef1ca663af9f2f4766aa116f127247008bd22 languageName: node linkType: 
hard @@ -9627,7 +9751,7 @@ __metadata: dependencies: "@jest/types": "npm:^27.5.1" "@types/node": "npm:*" - checksum: be9a8777801659227d3bb85317a3aca617542779a290a6a45c9addec8bda29f494a524cb4af96c82b825ecb02171e320dfbfde3e3d9218672f9e38c9fac118f4 + checksum: 10c0/6ad58454b37ee3f726930b07efbf40a7c79d2d2d9c7b226708b4b550bc0904de93bcacf714105d11952a5c0bc855e5d59145c8c9dbbb4e69b46e7367abf53b52 languageName: node linkType: hard @@ -9638,7 +9762,7 @@ __metadata: "@jest/types": "npm:^29.6.3" "@types/node": "npm:*" jest-util: "npm:^29.7.0" - checksum: ae51d1b4f898724be5e0e52b2268a68fcd876d9b20633c864a6dd6b1994cbc48d62402b0f40f3a1b669b30ebd648821f086c26c08ffde192ced951ff4670d51c + checksum: 10c0/7b9f8349ee87695a309fe15c46a74ab04c853369e5c40952d68061d9dc3159a0f0ed73e215f81b07ee97a9faaf10aebe5877a9d6255068a0977eae6a9ff1d5ac languageName: node linkType: hard @@ -9650,14 +9774,14 @@ __metadata: peerDependenciesMeta: jest-resolve: optional: true - checksum: db1a8ab2cb97ca19c01b1cfa9a9c8c69a143fde833c14df1fab0766f411b1148ff0df878adea09007ac6a2085ec116ba9a996a6ad104b1e58c20adbf88eed9b2 + checksum: 10c0/86eec0c78449a2de733a6d3e316d49461af6a858070e113c97f75fb742a48c2396ea94150cbca44159ffd4a959f743a47a8b37a792ef6fdad2cf0a5cba973fac languageName: node linkType: hard "jest-regex-util@npm:^29.6.3": version: 29.6.3 resolution: "jest-regex-util@npm:29.6.3" - checksum: 0518beeb9bf1228261695e54f0feaad3606df26a19764bc19541e0fc6e2a3737191904607fb72f3f2ce85d9c16b28df79b7b1ec9443aa08c3ef0e9efda6f8f2a + checksum: 10c0/4e33fb16c4f42111159cafe26397118dcfc4cf08bc178a67149fb05f45546a91928b820894572679d62559839d0992e21080a1527faad65daaae8743a5705a3b languageName: node linkType: hard @@ -9667,7 +9791,7 @@ __metadata: dependencies: jest-regex-util: "npm:^29.6.3" jest-snapshot: "npm:^29.7.0" - checksum: 1e206f94a660d81e977bcfb1baae6450cb4a81c92e06fad376cc5ea16b8e8c6ea78c383f39e95591a9eb7f925b6a1021086c38941aa7c1b8a6a813c2f6e93675 + checksum: 
10c0/b6e9ad8ae5b6049474118ea6441dfddd385b6d1fc471db0136f7c8fbcfe97137a9665e4f837a9f49f15a29a1deb95a14439b7aec812f3f99d08f228464930f0d languageName: node linkType: hard @@ -9684,7 +9808,7 @@ __metadata: resolve: "npm:^1.20.0" resolve.exports: "npm:^2.0.0" slash: "npm:^3.0.0" - checksum: faa466fd9bc69ea6c37a545a7c6e808e073c66f46ab7d3d8a6ef084f8708f201b85d5fe1799789578b8b47fa1de47b9ee47b414d1863bc117a49e032ba77b7c7 + checksum: 10c0/59da5c9c5b50563e959a45e09e2eace783d7f9ac0b5dcc6375dea4c0db938d2ebda97124c8161310082760e8ebbeff9f6b177c15ca2f57fb424f637a5d2adb47 languageName: node linkType: hard @@ -9713,7 +9837,7 @@ __metadata: jest-worker: "npm:^29.7.0" p-limit: "npm:^3.1.0" source-map-support: "npm:0.5.13" - checksum: 9d8748a494bd90f5c82acea99be9e99f21358263ce6feae44d3f1b0cd90991b5df5d18d607e73c07be95861ee86d1cbab2a3fc6ca4b21805f07ac29d47c1da1e + checksum: 10c0/2194b4531068d939f14c8d3274fe5938b77fa73126aedf9c09ec9dec57d13f22c72a3b5af01ac04f5c1cf2e28d0ac0b4a54212a61b05f10b5d6b47f2a1097bb4 languageName: node linkType: hard @@ -9743,7 +9867,7 @@ __metadata: jest-util: "npm:^29.7.0" slash: "npm:^3.0.0" strip-bom: "npm:^4.0.0" - checksum: 59eb58eb7e150e0834a2d0c0d94f2a0b963ae7182cfa6c63f2b49b9c6ef794e5193ef1634e01db41420c36a94cefc512cdd67a055cd3e6fa2f41eaf0f82f5a20 + checksum: 10c0/7cd89a1deda0bda7d0941835434e44f9d6b7bd50b5c5d9b0fc9a6c990b2d4d2cab59685ab3cb2850ed4cc37059f6de903af5a50565d7f7f1192a77d3fd6dd2a6 languageName: node linkType: hard @@ -9771,7 +9895,7 @@ __metadata: natural-compare: "npm:^1.4.0" pretty-format: "npm:^29.7.0" semver: "npm:^7.5.3" - checksum: cb19a3948256de5f922d52f251821f99657339969bf86843bd26cf3332eae94883e8260e3d2fba46129a27c3971c1aa522490e460e16c7fad516e82d10bbf9f8 + checksum: 10c0/6e9003c94ec58172b4a62864a91c0146513207bedf4e0a06e1e2ac70a4484088a2683e3a0538d8ea913bcfd53dc54a9b98a98cdfa562e7fe1d1339aeae1da570 languageName: node linkType: hard @@ -9785,7 +9909,7 @@ __metadata: ci-info: "npm:^3.2.0" graceful-fs: "npm:^4.2.9" picomatch: "npm:^2.2.3" - 
checksum: 30d58af6967e7d42bd903ccc098f3b4d3859ed46238fbc88d4add6a3f10bea00c226b93660285f058bc7a65f6f9529cf4eb80f8d4707f79f9e3a23686b4ab8f3 + checksum: 10c0/bc55a8f49fdbb8f51baf31d2a4f312fb66c9db1483b82f602c9c990e659cdd7ec529c8e916d5a89452ecbcfae4949b21b40a7a59d4ffc0cd813a973ab08c8150 languageName: node linkType: hard @@ -9799,7 +9923,7 @@ __metadata: jest-get-type: "npm:^29.6.3" leven: "npm:^3.1.0" pretty-format: "npm:^29.7.0" - checksum: 8ee1163666d8eaa16d90a989edba2b4a3c8ab0ffaa95ad91b08ca42b015bfb70e164b247a5b17f9de32d096987cada63ed8491ab82761bfb9a28bc34b27ae161 + checksum: 10c0/a20b930480c1ed68778c739f4739dce39423131bc070cd2505ddede762a5570a256212e9c2401b7ae9ba4d7b7c0803f03c5b8f1561c62348213aba18d9dbece2 languageName: node linkType: hard @@ -9815,7 +9939,7 @@ __metadata: emittery: "npm:^0.13.1" jest-util: "npm:^29.7.0" string-length: "npm:^4.0.1" - checksum: 4f616e0345676631a7034b1d94971aaa719f0cd4a6041be2aa299be437ea047afd4fe05c48873b7963f5687a2f6c7cbf51244be8b14e313b97bfe32b1e127e55 + checksum: 10c0/ec6c75030562fc8f8c727cb8f3b94e75d831fc718785abfc196e1f2a2ebc9a2e38744a15147170039628a853d77a3b695561ce850375ede3a4ee6037a2574567 languageName: node linkType: hard @@ -9827,7 +9951,7 @@ __metadata: jest-util: "npm:^29.7.0" merge-stream: "npm:^2.0.0" supports-color: "npm:^8.0.0" - checksum: 364cbaef00d8a2729fc760227ad34b5e60829e0869bd84976bdfbd8c0d0f9c2f22677b3e6dd8afa76ed174765351cd12bae3d4530c62eefb3791055127ca9745 + checksum: 10c0/5570a3a005b16f46c131968b8a5b56d291f9bbb85ff4217e31c80bd8a02e7de799e59a54b95ca28d5c302f248b54cbffde2d177c2f0f52ffcee7504c6eabf660 languageName: node linkType: hard @@ -9846,21 +9970,21 @@ __metadata: optional: true bin: jest: bin/jest.js - checksum: 97023d78446098c586faaa467fbf2c6b07ff06e2c85a19e3926adb5b0effe9ac60c4913ae03e2719f9c01ae8ffd8d92f6b262cedb9555ceeb5d19263d8c6362a + checksum: 10c0/f40eb8171cf147c617cc6ada49d062fbb03b4da666cb8d39cdbfb739a7d75eea4c3ca150fb072d0d273dce0c753db4d0467d54906ad0293f59c54f9db4a09d8b languageName: node 
linkType: hard "js-base64@npm:3.7.7": version: 3.7.7 resolution: "js-base64@npm:3.7.7" - checksum: 185e34c536a6b1c4e1ad8bd96d25b49a9ea4e6803e259eaaaca95f1b392a0d590b2933c5ca8580c776f7279507944b81ff1faf889d84baa5e31f026e96d676a5 + checksum: 10c0/3c905a7e78b601e4751b5e710edd0d6d045ce2d23eb84c9df03515371e1b291edc72808dc91e081cb9855aef6758292a2407006f4608ec3705373dd8baf2f80f languageName: node linkType: hard "js-tokens@npm:^3.0.0 || ^4.0.0, js-tokens@npm:^4.0.0": version: 4.0.0 resolution: "js-tokens@npm:4.0.0" - checksum: af37d0d913fb56aec6dc0074c163cc71cd23c0b8aad5c2350747b6721d37ba118af35abdd8b33c47ec2800de07dedb16a527ca9c530ee004093e04958bd0cbf2 + checksum: 10c0/e248708d377aa058eacf2037b07ded847790e6de892bbad3dac0abba2e759cb9f121b00099a65195616badcb6eca8d14d975cb3e89eb1cfda644756402c8aeed languageName: node linkType: hard @@ -9872,7 +9996,7 @@ __metadata: esprima: "npm:^4.0.0" bin: js-yaml: bin/js-yaml.js - checksum: 9e22d80b4d0105b9899135365f746d47466ed53ef4223c529b3c0f7a39907743fdbd3c4379f94f1106f02755b5e90b2faaf84801a891135544e1ea475d1a1379 + checksum: 10c0/6746baaaeac312c4db8e75fa22331d9a04cccb7792d126ed8ce6a0bbcfef0cedaddd0c5098fade53db067c09fe00aa1c957674b4765610a8b06a5a189e46433b languageName: node linkType: hard @@ -9883,14 +10007,14 @@ __metadata: argparse: "npm:^2.0.1" bin: js-yaml: bin/js-yaml.js - checksum: c138a34a3fd0d08ebaf71273ad4465569a483b8a639e0b118ff65698d257c2791d3199e3f303631f2cb98213fa7b5f5d6a4621fd0fff819421b990d30d967140 + checksum: 10c0/184a24b4eaacfce40ad9074c64fd42ac83cf74d8c8cd137718d456ced75051229e5061b8633c3366b8aada17945a7a356b337828c19da92b51ae62126575018f languageName: node linkType: hard "jsbn@npm:1.1.0": version: 1.1.0 resolution: "jsbn@npm:1.1.0" - checksum: bebe7ae829bbd586ce8cbe83501dd8cb8c282c8902a8aeeed0a073a89dc37e8103b1244f3c6acd60278bcbfe12d93a3f83c9ac396868a3b3bbc3c5e5e3b648ef + checksum: 10c0/4f907fb78d7b712e11dea8c165fe0921f81a657d3443dde75359ed52eb2b5d33ce6773d97985a089f09a65edd80b11cb75c767b57ba47391fee4c969f7215c96 
languageName: node linkType: hard @@ -9925,7 +10049,7 @@ __metadata: optional: true bin: jscodeshift: bin/jscodeshift.js - checksum: 5f4354d80a95de4dba5dd402e97e5bba8c6b31261f426719cb184099ac83c57c47e4160923b7c035a5da4113e56c39eb68233e3b55a910372013d66d3b1f1c64 + checksum: 10c0/79afb059b9ca92712af02bdc8d6ff144de7aaf5e2cdcc6f6534e7a86a7347b0a278d9f4884f2c78dac424162a353aafff183a60e868f71132be2c5b5304aeeb8 languageName: node linkType: hard @@ -9964,7 +10088,7 @@ __metadata: peerDependenciesMeta: canvas: optional: true - checksum: a4cdcff5b07eed87da90b146b82936321533b5efe8124492acf7160ebd5b9cf2b3c2435683592bf1cffb479615245756efb6c173effc1906f845a86ed22af985 + checksum: 10c0/b109073bb826a966db7828f46cb1d7371abecd30f182b143c52be5fe1ed84513bbbe995eb3d157241681fcd18331381e61e3dc004d4949f3a63bca02f6214902 languageName: node linkType: hard @@ -9973,7 +10097,7 @@ __metadata: resolution: "jsesc@npm:2.5.2" bin: jsesc: bin/jsesc - checksum: d2096abdcdec56969764b40ffc91d4a23408aa2f351b4d1c13f736f25476643238c43fdbaf38a191c26b1b78fd856d965f5d4d0dde7b89459cd94025190cdf13 + checksum: 10c0/dbf59312e0ebf2b4405ef413ec2b25abb5f8f4d9bc5fb8d9f90381622ebca5f2af6a6aa9a8578f65903f9e33990a6dc798edd0ce5586894bf0e9e31803a1de88 languageName: node linkType: hard @@ -9982,49 +10106,49 @@ __metadata: resolution: "jsesc@npm:0.5.0" bin: jsesc: bin/jsesc - checksum: fab949f585c71e169c5cbe00f049f20de74f067081bbd64a55443bad1c71e1b5a5b448f2359bf2fe06f5ed7c07e2e4a9101843b01c823c30b6afc11f5bfaf724 + checksum: 10c0/f93792440ae1d80f091b65f8ceddf8e55c4bb7f1a09dee5dcbdb0db5612c55c0f6045625aa6b7e8edb2e0a4feabd80ee48616dbe2d37055573a84db3d24f96d9 languageName: node linkType: hard "json-buffer@npm:3.0.1": version: 3.0.1 resolution: "json-buffer@npm:3.0.1" - checksum: 82876154521b7b68ba71c4f969b91572d1beabadd87bd3a6b236f85fbc7dc4695089191ed60bb59f9340993c51b33d479f45b6ba9f3548beb519705281c32c3c + checksum: 
10c0/0d1c91569d9588e7eef2b49b59851f297f3ab93c7b35c7c221e288099322be6b562767d11e4821da500f3219542b9afd2e54c5dc573107c1126ed1080f8e96d7 languageName: node linkType: hard "json-parse-even-better-errors@npm:^2.3.0": version: 2.3.1 resolution: "json-parse-even-better-errors@npm:2.3.1" - checksum: 5f3a99009ed5f2a5a67d06e2f298cc97bc86d462034173308156f15b43a6e850be8511dc204b9b94566305da2947f7d90289657237d210351a39059ff9d666cf + checksum: 10c0/140932564c8f0b88455432e0f33c4cb4086b8868e37524e07e723f4eaedb9425bdc2bafd71bd1d9765bd15fd1e2d126972bc83990f55c467168c228c24d665f3 languageName: node linkType: hard "json-schema-traverse@npm:^0.4.1": version: 0.4.1 resolution: "json-schema-traverse@npm:0.4.1" - checksum: 7486074d3ba247769fda17d5181b345c9fb7d12e0da98b22d1d71a5db9698d8b4bd900a3ec1a4ffdd60846fc2556274a5c894d0c48795f14cb03aeae7b55260b + checksum: 10c0/108fa90d4cc6f08243aedc6da16c408daf81793bf903e9fd5ab21983cda433d5d2da49e40711da016289465ec2e62e0324dcdfbc06275a607fe3233fde4942ce languageName: node linkType: hard "json-schema-traverse@npm:^1.0.0": version: 1.0.0 resolution: "json-schema-traverse@npm:1.0.0" - checksum: 02f2f466cdb0362558b2f1fd5e15cce82ef55d60cd7f8fa828cf35ba74330f8d767fcae5c5c2adb7851fa811766c694b9405810879bc4e1ddd78a7c0e03658ad + checksum: 10c0/71e30015d7f3d6dc1c316d6298047c8ef98a06d31ad064919976583eb61e1018a60a0067338f0f79cabc00d84af3fcc489bd48ce8a46ea165d9541ba17fb30c6 languageName: node linkType: hard "json-schema@npm:0.4.0": version: 0.4.0 resolution: "json-schema@npm:0.4.0" - checksum: 8b3b64eff4a807dc2a3045b104ed1b9335cd8d57aa74c58718f07f0f48b8baa3293b00af4dcfbdc9144c3aafea1e97982cc27cc8e150fc5d93c540649507a458 + checksum: 10c0/d4a637ec1d83544857c1c163232f3da46912e971d5bf054ba44fdb88f07d8d359a462b4aec46f2745efbc57053365608d88bc1d7b1729f7b4fc3369765639ed3 languageName: node linkType: hard "json-stable-stringify-without-jsonify@npm:^1.0.1": version: 1.0.1 resolution: "json-stable-stringify-without-jsonify@npm:1.0.1" - checksum: 
12786c2e2f22c27439e6db0532ba321f1d0617c27ad8cb1c352a0e9249a50182fd1ba8b52a18899291604b0c32eafa8afd09e51203f19109a0537f68db2b652d + checksum: 10c0/cb168b61fd4de83e58d09aaa6425ef71001bae30d260e2c57e7d09a5fd82223e2f22a042dedaab8db23b7d9ae46854b08bb1f91675a8be11c5cffebef5fb66a5 languageName: node linkType: hard @@ -10035,7 +10159,7 @@ __metadata: minimist: "npm:^1.2.0" bin: json5: lib/cli.js - checksum: a78d812dbbd5642c4f637dd130954acfd231b074965871c3e28a5bbd571f099d623ecf9161f1960c4ddf68e0cc98dee8bebfdb94a71ad4551f85a1afc94b63f6 + checksum: 10c0/9ee316bf21f000b00752e6c2a3b79ecf5324515a5c60ee88983a1910a45426b643a4f3461657586e8aeca87aaf96f0a519b0516d2ae527a6c3e7eed80f68717f languageName: node linkType: hard @@ -10044,7 +10168,7 @@ __metadata: resolution: "json5@npm:2.2.3" bin: json5: lib/cli.js - checksum: 1db67b853ff0de3534085d630691d3247de53a2ed1390ba0ddff681ea43e9b3e30ecbdb65c5e9aab49435e44059c23dbd6fee8ee619419ba37465bb0dd7135da + checksum: 10c0/5a04eed94810fa55c5ea138b2f7a5c12b97c3750bc63d11e511dcecbfef758003861522a070c2272764ee0f4e3e323862f386945aeb5b85b87ee43f084ba586c languageName: node linkType: hard @@ -10057,7 +10181,7 @@ __metadata: dependenciesMeta: graceful-fs: optional: true - checksum: 03014769e7dc77d4cf05fa0b534907270b60890085dd5e4d60a382ff09328580651da0b8b4cdf44d91e4c8ae64d91791d965f05707beff000ed494a38b6fec85 + checksum: 10c0/4f95b5e8a5622b1e9e8f33c96b7ef3158122f595998114d1e7f03985649ea99cb3cd99ce1ed1831ae94c8c8543ab45ebd044207612f31a56fd08462140e46865 languageName: node linkType: hard @@ -10069,7 +10193,7 @@ __metadata: array.prototype.flat: "npm:^1.3.1" object.assign: "npm:^4.1.4" object.values: "npm:^1.1.6" - checksum: b61d44613687dfe4cc8ad4b4fbf3711bf26c60b8d5ed1f494d723e0808415c59b24a7c0ed8ab10736a40ff84eef38cbbfb68b395e05d31117b44ffc59d31edfc + checksum: 10c0/a32679e9cb55469cb6d8bbc863f7d631b2c98b7fc7bf172629261751a6e7bc8da6ae374ddb74d5fbd8b06cf0eb4572287b259813d92b36e384024ed35e4c13e1 languageName: node linkType: hard @@ -10078,35 +10202,35 @@ 
__metadata: resolution: "keyv@npm:4.5.4" dependencies: json-buffer: "npm:3.0.1" - checksum: 167eb6ef64cc84b6fa0780ee50c9de456b422a1e18802209234f7c2cf7eae648c7741f32e50d7e24ccb22b24c13154070b01563d642755b156c357431a191e75 + checksum: 10c0/aa52f3c5e18e16bb6324876bb8b59dd02acf782a4b789c7b2ae21107fab95fab3890ed448d4f8dba80ce05391eeac4bfabb4f02a20221342982f806fa2cf271e languageName: node linkType: hard "kind-of@npm:^6.0.2": version: 6.0.3 resolution: "kind-of@npm:6.0.3" - checksum: 5873d303fb36aad875b7538798867da2ae5c9e328d67194b0162a3659a627d22f742fc9c4ae95cd1704132a24b00cae5041fc00c0f6ef937dc17080dc4dbb962 + checksum: 10c0/61cdff9623dabf3568b6445e93e31376bee1cdb93f8ba7033d86022c2a9b1791a1d9510e026e6465ebd701a6dd2f7b0808483ad8838341ac52f003f512e0b4c4 languageName: node linkType: hard "kleur@npm:^3.0.3": version: 3.0.3 resolution: "kleur@npm:3.0.3" - checksum: 0c0ecaf00a5c6173d25059c7db2113850b5457016dfa1d0e3ef26da4704fbb186b4938d7611246d86f0ddf1bccf26828daa5877b1f232a65e7373d0122a83e7f + checksum: 10c0/cd3a0b8878e7d6d3799e54340efe3591ca787d9f95f109f28129bdd2915e37807bf8918bb295ab86afb8c82196beec5a1adcaf29042ce3f2bd932b038fe3aa4b languageName: node linkType: hard "kuler@npm:^2.0.0": version: 2.0.0 resolution: "kuler@npm:2.0.0" - checksum: 9e10b5a1659f9ed8761d38df3c35effabffbd19fc6107324095238e4ef0ff044392cae9ac64a1c2dda26e532426485342226b93806bd97504b174b0dcf04ed81 + checksum: 10c0/0a4e99d92ca373f8f74d1dc37931909c4d0d82aebc94cf2ba265771160fc12c8df34eaaac80805efbda367e2795cb1f1dd4c3d404b6b1cf38aec94035b503d2d languageName: node linkType: hard "language-subtag-registry@npm:^0.3.20": version: 0.3.22 resolution: "language-subtag-registry@npm:0.3.22" - checksum: 5591f4abd775d1ab5945355a5ba894327d2d94c900607bdb69aac1bc5bb921dbeeeb5f616df95e8c0ae875501d19c1cfa0e852ece822121e95048deb34f2b4d2 + checksum: 10c0/d1e09971260a7cd3b9fdeb190d33af0b6e99c8697013537d9aaa15f7856d9d83aee128ba8078e219df0a7cf4b8dd18d1a0c188f6543b500d92a2689d2d114b70 languageName: node linkType: hard @@ 
-10115,7 +10239,7 @@ __metadata: resolution: "language-tags@npm:1.0.9" dependencies: language-subtag-registry: "npm:^0.3.20" - checksum: d3a7c14b694e67f519153d6df6cb200681648d38d623c3bfa9d6a66a5ec5493628acb88e9df5aceef3cf1902ab263a205e7d59ee4cf1d6bb67e707b83538bd6d + checksum: 10c0/9ab911213c4bd8bd583c850201c17794e52cb0660d1ab6e32558aadc8324abebf6844e46f92b80a5d600d0fbba7eface2c207bfaf270a1c7fd539e4c3a880bff languageName: node linkType: hard @@ -10126,14 +10250,14 @@ __metadata: app-root-dir: "npm:^1.0.2" dotenv: "npm:^16.0.0" dotenv-expand: "npm:^10.0.0" - checksum: 5aa4d1a01d108d1f4a565576b58e728be949ceccecef894d6a9de56cb2b8e2e033abd47424190d0a546cb22b4b4a3ab553346b9710c3294870660d4a3555dd34 + checksum: 10c0/3bc4fe649c46c4a20561ca1fd10cd1df641d2c6c42c61af6c65a5fe0546cb548f449e13e6c7440be445c9fe5b4973c25f499e7d899b8704b7b9bd0ec85bbfe2d languageName: node linkType: hard "leven@npm:^3.1.0": version: 3.1.0 resolution: "leven@npm:3.1.0" - checksum: 638401d534585261b6003db9d99afd244dfe82d75ddb6db5c0df412842d5ab30b2ef18de471aaec70fe69a46f17b4ae3c7f01d8a4e6580ef7adb9f4273ad1e55 + checksum: 10c0/cd778ba3fbab0f4d0500b7e87d1f6e1f041507c56fdcd47e8256a3012c98aaee371d4c15e0a76e0386107af2d42e2b7466160a2d80688aaa03e66e49949f42df languageName: node linkType: hard @@ -10143,14 +10267,55 @@ __metadata: dependencies: prelude-ls: "npm:^1.2.1" type-check: "npm:~0.4.0" - checksum: 2e4720ff79f21ae08d42374b0a5c2f664c5be8b6c8f565bb4e1315c96ed3a8acaa9de788ffed82d7f2378cf36958573de07ef92336cb5255ed74d08b8318c9ee + checksum: 10c0/effb03cad7c89dfa5bd4f6989364bfc79994c2042ec5966cb9b95990e2edee5cd8969ddf42616a0373ac49fac1403437deaf6e9050fbbaa3546093a59b9ac94e + languageName: node + linkType: hard + +"lilconfig@npm:~3.1.2": + version: 3.1.2 + resolution: "lilconfig@npm:3.1.2" + checksum: 10c0/f059630b1a9bddaeba83059db00c672b64dc14074e9f232adce32b38ca1b5686ab737eb665c5ba3c32f147f0002b4bee7311ad0386a9b98547b5623e87071fbe languageName: node linkType: hard "lines-and-columns@npm:^1.1.6": version: 
1.2.4 resolution: "lines-and-columns@npm:1.2.4" - checksum: 0c37f9f7fa212b38912b7145e1cd16a5f3cd34d782441c3e6ca653485d326f58b3caccda66efce1c5812bde4961bbde3374fae4b0d11bf1226152337f3894aa5 + checksum: 10c0/3da6ee62d4cd9f03f5dc90b4df2540fb85b352081bee77fe4bbcd12c9000ead7f35e0a38b8d09a9bb99b13223446dd8689ff3c4959807620726d788701a83d2d + languageName: node + linkType: hard + +"lint-staged@npm:15.2.10": + version: 15.2.10 + resolution: "lint-staged@npm:15.2.10" + dependencies: + chalk: "npm:~5.3.0" + commander: "npm:~12.1.0" + debug: "npm:~4.3.6" + execa: "npm:~8.0.1" + lilconfig: "npm:~3.1.2" + listr2: "npm:~8.2.4" + micromatch: "npm:~4.0.8" + pidtree: "npm:~0.6.0" + string-argv: "npm:~0.3.2" + yaml: "npm:~2.5.0" + bin: + lint-staged: bin/lint-staged.js + checksum: 10c0/6ad7b41f5e87a84fa2eb1990080ea3c68a2f2031b4e81edcdc2a458cc878538eedb310e6f98ffd878a1287e1a52ac968e540ee8a0e96c247e04b0cbc36421cdd + languageName: node + linkType: hard + +"listr2@npm:~8.2.4": + version: 8.2.4 + resolution: "listr2@npm:8.2.4" + dependencies: + cli-truncate: "npm:^4.0.0" + colorette: "npm:^2.0.20" + eventemitter3: "npm:^5.0.1" + log-update: "npm:^6.1.0" + rfdc: "npm:^1.4.1" + wrap-ansi: "npm:^9.0.0" + checksum: 10c0/df5b129e9767de1997973cec6103cd4bd6fc3b3367685b7c23048d12b61d5b7e44fecd8a3d3534c0e1c963bd5ac43ca501d14712f46fa101050037be323a5c16 languageName: node linkType: hard @@ -10160,7 +10325,7 @@ __metadata: dependencies: p-locate: "npm:^3.0.0" path-exists: "npm:^3.0.0" - checksum: 53db3996672f21f8b0bf2a2c645ae2c13ffdae1eeecfcd399a583bce8516c0b88dcb4222ca6efbbbeb6949df7e46860895be2c02e8d3219abd373ace3bfb4e11 + checksum: 10c0/3db394b7829a7fe2f4fbdd25d3c4689b85f003c318c5da4052c7e56eed697da8f1bce5294f685c69ff76e32cba7a33629d94396976f6d05fb7f4c755c5e2ae8b languageName: node linkType: hard @@ -10169,7 +10334,7 @@ __metadata: resolution: "locate-path@npm:5.0.0" dependencies: p-locate: "npm:^4.1.0" - checksum: 
83e51725e67517287d73e1ded92b28602e3ae5580b301fe54bfb76c0c723e3f285b19252e375712316774cf52006cb236aed5704692c32db0d5d089b69696e30 + checksum: 10c0/33a1c5247e87e022f9713e6213a744557a3e9ec32c5d0b5efb10aa3a38177615bf90221a5592674857039c1a0fd2063b82f285702d37b792d973e9e72ace6c59 languageName: node linkType: hard @@ -10178,35 +10343,35 @@ __metadata: resolution: "locate-path@npm:6.0.0" dependencies: p-locate: "npm:^5.0.0" - checksum: 72eb661788a0368c099a184c59d2fee760b3831c9c1c33955e8a19ae4a21b4116e53fa736dc086cdeb9fce9f7cc508f2f92d2d3aae516f133e16a2bb59a39f5a + checksum: 10c0/d3972ab70dfe58ce620e64265f90162d247e87159b6126b01314dd67be43d50e96a50b517bce2d9452a79409c7614054c277b5232377de50416564a77ac7aad3 languageName: node linkType: hard "lodash.debounce@npm:^4.0.8": version: 4.0.8 resolution: "lodash.debounce@npm:4.0.8" - checksum: cd0b2819786e6e80cb9f5cda26b1a8fc073daaf04e48d4cb462fa4663ec9adb3a5387aa22d7129e48eed1afa05b482e2a6b79bfc99b86886364449500cbb00fd + checksum: 10c0/762998a63e095412b6099b8290903e0a8ddcb353ac6e2e0f2d7e7d03abd4275fe3c689d88960eb90b0dde4f177554d51a690f22a343932ecbc50a5d111849987 languageName: node linkType: hard "lodash.escape@npm:^4.0.1": version: 4.0.1 resolution: "lodash.escape@npm:4.0.1" - checksum: ba1effab9aea7e20ee69b26cbfeb41c73da2eb4d2ab1c261aaf53dd0902ce1afc2f0b34fb24bc69c1d2dd201c332e1d1eb696092fc844a2c5c8e7ccd1ca32014 + checksum: 10c0/90ade409cec05b6869090476952fdfb84d4d87b1ff4a0e03ebd590f980d9a1248d93ba14579f10d80c6429e4d6af13ba137c28db64cae6dadb71442e54a3ad2b languageName: node linkType: hard "lodash.merge@npm:^4.6.2": version: 4.6.2 resolution: "lodash.merge@npm:4.6.2" - checksum: d0ea2dd0097e6201be083865d50c3fb54fbfbdb247d9cc5950e086c991f448b7ab0cdab0d57eacccb43473d3f2acd21e134db39f22dac2d6c9ba6bf26978e3d6 + checksum: 10c0/402fa16a1edd7538de5b5903a90228aa48eb5533986ba7fa26606a49db2572bf414ff73a2c9f5d5fd36b31c46a5d5c7e1527749c07cbcf965ccff5fbdf32c506 languageName: node linkType: hard "lodash@npm:^4.17.15, lodash@npm:^4.17.21": 
version: 4.17.21 resolution: "lodash@npm:4.17.21" - checksum: c08619c038846ea6ac754abd6dd29d2568aa705feb69339e836dfa8d8b09abbb2f859371e86863eda41848221f9af43714491467b5b0299122431e202bb0c532 + checksum: 10c0/d8cbea072bb08655bb4c989da418994b073a608dffa608b09ac04b43a791b12aeae7cd7ad919aa4c925f33b48490b5cfe6c1f71d827956071dae2e7bb3a6b74c languageName: node linkType: hard @@ -10216,7 +10381,20 @@ __metadata: dependencies: chalk: "npm:^4.1.0" is-unicode-supported: "npm:^0.1.0" - checksum: fce1497b3135a0198803f9f07464165e9eb83ed02ceb2273930a6f8a508951178d8cf4f0378e9d28300a2ed2bc49050995d2bd5f53ab716bb15ac84d58c6ef74 + checksum: 10c0/67f445a9ffa76db1989d0fa98586e5bc2fd5247260dafb8ad93d9f0ccd5896d53fb830b0e54dade5ad838b9de2006c826831a3c528913093af20dff8bd24aca6 + languageName: node + linkType: hard + +"log-update@npm:^6.1.0": + version: 6.1.0 + resolution: "log-update@npm:6.1.0" + dependencies: + ansi-escapes: "npm:^7.0.0" + cli-cursor: "npm:^5.0.0" + slice-ansi: "npm:^7.1.0" + strip-ansi: "npm:^7.1.0" + wrap-ansi: "npm:^9.0.0" + checksum: 10c0/4b350c0a83d7753fea34dcac6cd797d1dc9603291565de009baa4aa91c0447eab0d3815a05c8ec9ac04fdfffb43c82adcdb03ec1fceafd8518e1a8c1cff4ff89 languageName: node linkType: hard @@ -10230,7 +10408,7 @@ __metadata: ms: "npm:^2.1.1" safe-stable-stringify: "npm:^2.3.1" triple-beam: "npm:^1.3.0" - checksum: 92de5696a529a7ccf4359fe65a21fce2398ba20c4b4e5769cba187b8fde01d590a22d3c83f797d31b436f49770fb1b2f28646e7c881d30b8d1f4080a05ae7006 + checksum: 10c0/6e02f8617a03155b2fce451bacf777a2c01da16d32c4c745b3ec85be6c3f2602f2a4953a8bd096441cb4c42c447b52318541d6b6bc335dce903cb9ad77a1749f languageName: node linkType: hard @@ -10241,7 +10419,7 @@ __metadata: js-tokens: "npm:^3.0.0 || ^4.0.0" bin: loose-envify: cli.js - checksum: 6517e24e0cad87ec9888f500c5b5947032cdfe6ef65e1c1936a0c48a524b81e65542c9c3edc91c97d5bddc806ee2a985dbc79be89215d613b1de5db6d1cfe6f4 + checksum: 
10c0/655d110220983c1a4b9c0c679a2e8016d4b67f6e9c7b5435ff5979ecdb20d0813f4dec0a08674fcbdd4846a3f07edbb50a36811fd37930b94aaa0d9daceb017e languageName: node linkType: hard @@ -10251,14 +10429,14 @@ __metadata: dependencies: fault: "npm:^1.0.0" highlight.js: "npm:~10.7.0" - checksum: 3294677be15bbc256556f097d9b675f23f14309aceeada7880473c57bdbdd7761f200d903fe26d8fa5e82259f70a39465d1d40754c4c049ad2bbd33d77e2c06f + checksum: 10c0/728bce6f6fe8b157f48d3324e597f452ce0eed2ccff1c0f41a9047380f944e971eb45bceb31f08fbb64d8f338dabb166f10049b35b92c7ec5cf0241d6adb3dea languageName: node linkType: hard "lru-cache@npm:^10.0.1, lru-cache@npm:^9.1.1 || ^10.0.0": version: 10.2.0 resolution: "lru-cache@npm:10.2.0" - checksum: 502ec42c3309c0eae1ce41afca471f831c278566d45a5273a0c51102dee31e0e250a62fa9029c3370988df33a14188a38e682c16143b794de78668de3643e302 + checksum: 10c0/c9847612aa2daaef102d30542a8d6d9b2c2bb36581c1bf0dc3ebf5e5f3352c772a749e604afae2e46873b930a9e9523743faac4e5b937c576ab29196774712ee languageName: node linkType: hard @@ -10267,7 +10445,7 @@ __metadata: resolution: "lru-cache@npm:5.1.1" dependencies: yallist: "npm:^3.0.2" - checksum: 951d2673dcc64a7fb888bf3d13bc2fdf923faca97d89cdb405ba3dfff77e2b26e5798d405e78fcd7094c9e7b8b4dab2ddc5a4f8a11928af24a207b7c738ca3f8 + checksum: 10c0/89b2ef2ef45f543011e38737b8a8622a2f8998cddf0e5437174ef8f1f70a8b9d14a918ab3e232cb3ba343b7abddffa667f0b59075b2b80e6b4d63c3de6127482 languageName: node linkType: hard @@ -10276,7 +10454,7 @@ __metadata: resolution: "lru-cache@npm:6.0.0" dependencies: yallist: "npm:^4.0.0" - checksum: fc1fe2ee205f7c8855fa0f34c1ab0bcf14b6229e35579ec1fd1079f31d6fc8ef8eb6fd17f2f4d99788d7e339f50e047555551ebd5e434dda503696e7c6591825 + checksum: 10c0/cb53e582785c48187d7a188d3379c181b5ca2a9c78d2bce3e7dee36f32761d1c42983da3fe12b55cb74e1779fa94cdc2e5367c028a9b35317184ede0c07a30a9 languageName: node linkType: hard @@ -10285,7 +10463,7 @@ __metadata: resolution: "lz-string@npm:1.5.0" bin: lz-string: bin/bin.js - checksum: 
e86f0280e99a8d8cd4eef24d8601ddae15ce54e43ac9990dfcb79e1e081c255ad24424a30d78d2ad8e51a8ce82a66a930047fed4b4aa38c6f0b392ff9300edfc + checksum: 10c0/36128e4de34791838abe979b19927c26e67201ca5acf00880377af7d765b38d1c60847e01c5ec61b1a260c48029084ab3893a3925fd6e48a04011364b089991b languageName: node linkType: hard @@ -10294,7 +10472,7 @@ __metadata: resolution: "magic-string@npm:0.27.0" dependencies: "@jridgewell/sourcemap-codec": "npm:^1.4.13" - checksum: 10a18a48d22fb14467d6cb4204aba58d6790ae7ba023835dc7a65e310cf216f042a17fab1155ba43e47117310a9b7c3fd3bb79f40be40f5124d6b1af9e96399b + checksum: 10c0/cddacfea14441ca57ae8a307bc3cf90bac69efaa4138dd9a80804cffc2759bf06f32da3a293fb13eaa96334b7d45b7768a34f1d226afae25d2f05b05a3bb37d8 languageName: node linkType: hard @@ -10303,7 +10481,7 @@ __metadata: resolution: "magic-string@npm:0.30.7" dependencies: "@jridgewell/sourcemap-codec": "npm:^1.4.15" - checksum: 883eaaf6792a3263e44f4bcdcd35ace272268e4b98ed5a770ad711947958d2f9fc683e474945e306e2bdc152b7e44d369ee312690d87025b9879fc63fbe1409c + checksum: 10c0/d1d949f7a53c37c6e685f4ea7b2b151c2fe0cc5af8f1f979ecba916f7d60d58f35309aaf4c8b09ce1aef7c160b957be39a38b52b478a91650750931e4ddd5daf languageName: node linkType: hard @@ -10313,7 +10491,7 @@ __metadata: dependencies: pify: "npm:^4.0.1" semver: "npm:^5.6.0" - checksum: 043548886bfaf1820323c6a2997e6d2fa51ccc2586ac14e6f14634f7458b4db2daf15f8c310e2a0abd3e0cddc64df1890d8fc7263033602c47bb12cbfcf86aab + checksum: 10c0/ada869944d866229819735bee5548944caef560d7a8536ecbc6536edca28c72add47cc4f6fc39c54fb25d06b58da1f8994cf7d9df7dadea047064749efc085d8 languageName: node linkType: hard @@ -10322,7 +10500,7 @@ __metadata: resolution: "make-dir@npm:3.1.0" dependencies: semver: "npm:^6.0.0" - checksum: 484200020ab5a1fdf12f393fe5f385fc8e4378824c940fba1729dcd198ae4ff24867bc7a5646331e50cead8abff5d9270c456314386e629acec6dff4b8016b78 + checksum: 
10c0/56aaafefc49c2dfef02c5c95f9b196c4eb6988040cf2c712185c7fe5c99b4091591a7fc4d4eafaaefa70ff763a26f6ab8c3ff60b9e75ea19876f49b18667ecaa languageName: node linkType: hard @@ -10331,7 +10509,7 @@ __metadata: resolution: "make-dir@npm:4.0.0" dependencies: semver: "npm:^7.5.3" - checksum: bf0731a2dd3aab4db6f3de1585cea0b746bb73eb5a02e3d8d72757e376e64e6ada190b1eddcde5b2f24a81b688a9897efd5018737d05e02e2a671dda9cff8a8a + checksum: 10c0/69b98a6c0b8e5c4fe9acb61608a9fbcfca1756d910f51e5dbe7a9e5cfb74fca9b8a0c8a0ffdf1294a740826c1ab4871d5bf3f62f72a3049e5eac6541ddffed68 languageName: node linkType: hard @@ -10350,7 +10528,7 @@ __metadata: negotiator: "npm:^0.6.3" promise-retry: "npm:^2.0.1" ssri: "npm:^10.0.0" - checksum: ded5a91a02b76381b06a4ec4d5c1d23ebbde15d402b3c3e4533b371dac7e2f7ca071ae71ae6dae72aa261182557b7b1b3fd3a705b39252dc17f74fa509d3e76f + checksum: 10c0/43b9f6dcbc6fe8b8604cb6396957c3698857a15ba4dbc38284f7f0e61f248300585ef1eb8cc62df54e9c724af977e45b5cdfd88320ef7f53e45070ed3488da55 languageName: node linkType: hard @@ -10359,14 +10537,14 @@ __metadata: resolution: "makeerror@npm:1.0.12" dependencies: tmpl: "npm:1.0.5" - checksum: 4c66ddfc654537333da952c084f507fa4c30c707b1635344eb35be894d797ba44c901a9cebe914aa29a7f61357543ba09b09dddbd7f65b4aee756b450f169f40 + checksum: 10c0/b0e6e599780ce6bab49cc413eba822f7d1f0dfebd1c103eaa3785c59e43e22c59018323cf9e1708f0ef5329e94a745d163fcbb6bff8e4c6742f9be9e86f3500c languageName: node linkType: hard "map-or-similar@npm:^1.5.0": version: 1.5.0 resolution: "map-or-similar@npm:1.5.0" - checksum: 3cf43bcd0e7af41d7bade5f8b5be6bb9d021cc47e6008ad545d071cf3a709ba782884002f9eec6ccd51f572fc17841e07bf74628e0bc3694c33f4622b03e4b4c + checksum: 10c0/33c6ccfdc272992e33e4e99a69541a3e7faed9de3ac5bc732feb2500a9ee71d3f9d098980a70b7746e7eeb7f859ff7dfb8aa9b5ecc4e34170a32ab78cfb18def languageName: node linkType: hard @@ -10375,7 +10553,7 @@ __metadata: resolution: "markdown-to-jsx@npm:7.4.1" peerDependencies: react: ">= 0.14.0" - checksum: 
7f68a6f3ae0855c13d2d54881c1c1e2c1776c4f4149e84e41ce35a76a007b4deb9784fd19018eebf1bba31d7dfd6a92c30ad6815d481dcb38b74da7a20d4cb44 + checksum: 10c0/f40d9ab632a659ef7fd3afcae9c40e11ce77d57ae856275eb9439bf6762738eefb9897cfb65fc0495347cf8fc3b1cfef6cb9dcfe96959ded3ebd122375155323 languageName: node linkType: hard @@ -10384,28 +10562,28 @@ __metadata: resolution: "mdast-util-definitions@npm:4.0.0" dependencies: unist-util-visit: "npm:^2.0.0" - checksum: c76da4b4f1e28f8e7c85bf664ab65060f5aa7e0fd0392a24482980984d4ba878b7635a08bcaccca060d6602f478ac6cadaffbbe65f910f75ce332fd67d0ade69 + checksum: 10c0/d81bb0b702f99878c8e8e4f66dd7f6f673ab341f061b3d9487ba47dad28b584e02f16b4c42df23714eaac8a7dd8544ba7d77308fad8d4a9fd0ac92e2a7f56be9 languageName: node linkType: hard "mdast-util-to-string@npm:^1.0.0": version: 1.1.0 resolution: "mdast-util-to-string@npm:1.1.0" - checksum: eec1eb283f3341376c8398b67ce512a11ab3e3191e3dbd5644d32a26784eac8d5f6d0b0fb81193af00d75a2c545cde765c8b03e966bd890076efb5d357fb4fe2 + checksum: 10c0/5dad9746ec0839792a8a35f504564e8d2b8c30013652410306c111963d33f1ee7b5477aa64ed77b64e13216363a29395809875ffd80e2031a08614657628a121 languageName: node linkType: hard "mdn-data@npm:2.0.14": version: 2.0.14 resolution: "mdn-data@npm:2.0.14" - checksum: 64c629fcf14807e30d6dc79f97cbcafa16db066f53a294299f3932b3beb0eb0d1386d3a7fe408fc67348c449a4e0999360c894ba4c81eb209d7be4e36503de0e + checksum: 10c0/67241f8708c1e665a061d2b042d2d243366e93e5bf1f917693007f6d55111588b952dcbfd3ea9c2d0969fb754aad81b30fdcfdcc24546495fc3b24336b28d4bd languageName: node linkType: hard "media-typer@npm:0.3.0": version: 0.3.0 resolution: "media-typer@npm:0.3.0" - checksum: 38e0984db39139604756903a01397e29e17dcb04207bb3e081412ce725ab17338ecc47220c1b186b6bbe79a658aad1b0d41142884f5a481f36290cdefbe6aa46 + checksum: 10c0/d160f31246907e79fed398470285f21bafb45a62869dc469b1c8877f3f064f5eabc4bcc122f9479b8b605bc5c76187d7871cf84c4ee3ecd3e487da1993279928 languageName: node linkType: hard @@ -10414,35 +10592,35 @@ 
__metadata: resolution: "memoizerific@npm:1.11.3" dependencies: map-or-similar: "npm:^1.5.0" - checksum: 72b6b80699777d000f03db6e15fdabcd4afe77feb45be51fe195cb230c64a368fcfcfbb976375eac3283bd8193d6b1a67ac3081cae07f64fca73f1aa568d59e3 + checksum: 10c0/661bf69b7afbfad57f0208f0c63324f4c96087b480708115b78ee3f0237d86c7f91347f6db31528740b2776c2e34c709bcb034e1e910edee2270c9603a0a469e languageName: node linkType: hard "merge-descriptors@npm:1.0.1": version: 1.0.1 resolution: "merge-descriptors@npm:1.0.1" - checksum: 5abc259d2ae25bb06d19ce2b94a21632583c74e2a9109ee1ba7fd147aa7362b380d971e0251069f8b3eb7d48c21ac839e21fa177b335e82c76ec172e30c31a26 + checksum: 10c0/b67d07bd44cfc45cebdec349bb6e1f7b077ee2fd5beb15d1f7af073849208cb6f144fe403e29a36571baf3f4e86469ac39acf13c318381e958e186b2766f54ec languageName: node linkType: hard "merge-stream@npm:^2.0.0": version: 2.0.0 resolution: "merge-stream@npm:2.0.0" - checksum: 6fa4dcc8d86629705cea944a4b88ef4cb0e07656ebf223fa287443256414283dd25d91c1cd84c77987f2aec5927af1a9db6085757cb43d90eb170ebf4b47f4f4 + checksum: 10c0/867fdbb30a6d58b011449b8885601ec1690c3e41c759ecd5a9d609094f7aed0096c37823ff4a7190ef0b8f22cc86beb7049196ff68c016e3b3c671d0dac91ce5 languageName: node linkType: hard "merge2@npm:^1.3.0, merge2@npm:^1.4.1": version: 1.4.1 resolution: "merge2@npm:1.4.1" - checksum: 7268db63ed5169466540b6fb947aec313200bcf6d40c5ab722c22e242f651994619bcd85601602972d3c85bd2cc45a358a4c61937e9f11a061919a1da569b0c2 + checksum: 10c0/254a8a4605b58f450308fc474c82ac9a094848081bf4c06778200207820e5193726dc563a0d2c16468810516a5c97d9d3ea0ca6585d23c58ccfff2403e8dbbeb languageName: node linkType: hard "methods@npm:~1.1.2": version: 1.1.2 resolution: "methods@npm:1.1.2" - checksum: a385dd974faa34b5dd021b2bbf78c722881bf6f003bfe6d391d7da3ea1ed625d1ff10ddd13c57531f628b3e785be38d3eed10ad03cebd90b76932413df9a1820 + checksum: 10c0/bdf7cc72ff0a33e3eede03708c08983c4d7a173f91348b4b1e4f47d4cdbf734433ad971e7d1e8c77247d9e5cd8adb81ea4c67b0a2db526b758b2233d7814b8b2 languageName: 
node linkType: hard @@ -10452,14 +10630,24 @@ __metadata: dependencies: braces: "npm:^3.0.2" picomatch: "npm:^2.3.1" - checksum: a749888789fc15cac0e03273844dbd749f9f8e8d64e70c564bcf06a033129554c789bb9e30d7566d7ff6596611a08e58ac12cf2a05f6e3c9c47c50c4c7e12fa2 + checksum: 10c0/3d6505b20f9fa804af5d8c596cb1c5e475b9b0cd05f652c5b56141cf941bd72adaeb7a436fda344235cef93a7f29b7472efc779fcdb83b478eab0867b95cdeff + languageName: node + linkType: hard + +"micromatch@npm:~4.0.8": + version: 4.0.8 + resolution: "micromatch@npm:4.0.8" + dependencies: + braces: "npm:^3.0.3" + picomatch: "npm:^2.3.1" + checksum: 10c0/166fa6eb926b9553f32ef81f5f531d27b4ce7da60e5baf8c021d043b27a388fb95e46a8038d5045877881e673f8134122b59624d5cecbd16eb50a42e7a6b5ca8 languageName: node linkType: hard "mime-db@npm:1.52.0, mime-db@npm:>= 1.43.0 < 2": version: 1.52.0 resolution: "mime-db@npm:1.52.0" - checksum: 54bb60bf39e6f8689f6622784e668a3d7f8bed6b0d886f5c3c446cb3284be28b30bf707ed05d0fe44a036f8469976b2629bbea182684977b084de9da274694d7 + checksum: 10c0/0557a01deebf45ac5f5777fe7740b2a5c309c6d62d40ceab4e23da9f821899ce7a900b7ac8157d4548ddbb7beffe9abc621250e6d182b0397ec7f10c7b91a5aa languageName: node linkType: hard @@ -10468,7 +10656,7 @@ __metadata: resolution: "mime-types@npm:2.1.35" dependencies: mime-db: "npm:1.52.0" - checksum: 89aa9651b67644035de2784a6e665fc685d79aba61857e02b9c8758da874a754aed4a9aced9265f5ed1171fd934331e5516b84a7f0218031b6fa0270eca1e51a + checksum: 10c0/82fb07ec56d8ff1fc999a84f2f217aa46cb6ed1033fefaabd5785b9a974ed225c90dc72fff460259e66b95b73648596dbcc50d51ed69cdf464af2d237d3149b2 languageName: node linkType: hard @@ -10477,7 +10665,7 @@ __metadata: resolution: "mime@npm:1.6.0" bin: mime: cli.js - checksum: b7d98bb1e006c0e63e2c91b590fe1163b872abf8f7ef224d53dd31499c2197278a6d3d0864c45239b1a93d22feaf6f9477e9fc847eef945838150b8c02d03170 + checksum: 10c0/b92cd0adc44888c7135a185bfd0dddc42c32606401c72896a842ae15da71eb88858f17669af41e498b463cd7eb998f7b48939a25b08374c7924a9c8a6f8a81b0 
languageName: node linkType: hard @@ -10486,28 +10674,35 @@ __metadata: resolution: "mime@npm:2.6.0" bin: mime: cli.js - checksum: 7da117808b5cd0203bb1b5e33445c330fe213f4d8ee2402a84d62adbde9716ca4fb90dd6d9ab4e77a4128c6c5c24a9c4c9f6a4d720b095b1b342132d02dba58d + checksum: 10c0/a7f2589900d9c16e3bdf7672d16a6274df903da958c1643c9c45771f0478f3846dcb1097f31eb9178452570271361e2149310931ec705c037210fc69639c8e6c languageName: node linkType: hard "mimic-fn@npm:^2.1.0": version: 2.1.0 resolution: "mimic-fn@npm:2.1.0" - checksum: d2421a3444848ce7f84bd49115ddacff29c15745db73f54041edc906c14b131a38d05298dae3081667627a59b2eb1ca4b436ff2e1b80f69679522410418b478a + checksum: 10c0/b26f5479d7ec6cc2bce275a08f146cf78f5e7b661b18114e2506dd91ec7ec47e7a25bf4360e5438094db0560bcc868079fb3b1fb3892b833c1ecbf63f80c95a4 languageName: node linkType: hard "mimic-fn@npm:^4.0.0": version: 4.0.0 resolution: "mimic-fn@npm:4.0.0" - checksum: 995dcece15ee29aa16e188de6633d43a3db4611bcf93620e7e62109ec41c79c0f34277165b8ce5e361205049766e371851264c21ac64ca35499acb5421c2ba56 + checksum: 10c0/de9cc32be9996fd941e512248338e43407f63f6d497abe8441fa33447d922e927de54d4cc3c1a3c6d652857acd770389d5a3823f311a744132760ce2be15ccbf + languageName: node + linkType: hard + +"mimic-function@npm:^5.0.0": + version: 5.0.1 + resolution: "mimic-function@npm:5.0.1" + checksum: 10c0/f3d9464dd1816ecf6bdf2aec6ba32c0728022039d992f178237d8e289b48764fee4131319e72eedd4f7f094e22ded0af836c3187a7edc4595d28dd74368fd81d languageName: node linkType: hard "min-indent@npm:^1.0.0, min-indent@npm:^1.0.1": version: 1.0.1 resolution: "min-indent@npm:1.0.1" - checksum: bfc6dd03c5eaf623a4963ebd94d087f6f4bbbfd8c41329a7f09706b0cb66969c4ddd336abeb587bc44bc6f08e13bf90f0b374f9d71f9f01e04adc2cd6f083ef1 + checksum: 10c0/7e207bd5c20401b292de291f02913230cb1163abca162044f7db1d951fa245b174dc00869d40dd9a9f32a885ad6a5f3e767ee104cf278f399cb4e92d3f582d5c languageName: node linkType: hard @@ -10516,7 +10711,7 @@ __metadata: resolution: "minimatch@npm:9.0.3" 
dependencies: brace-expansion: "npm:^2.0.1" - checksum: c81b47d28153e77521877649f4bab48348d10938df9e8147a58111fe00ef89559a2938de9f6632910c4f7bf7bb5cd81191a546167e58d357f0cfb1e18cecc1c5 + checksum: 10c0/85f407dcd38ac3e180f425e86553911d101455ca3ad5544d6a7cec16286657e4f8a9aa6695803025c55e31e35a91a2252b5dc8e7d527211278b8b65b4dbd5eac languageName: node linkType: hard @@ -10525,7 +10720,7 @@ __metadata: resolution: "minimatch@npm:3.1.2" dependencies: brace-expansion: "npm:^1.1.7" - checksum: e0b25b04cd4ec6732830344e5739b13f8690f8a012d73445a4a19fbc623f5dd481ef7a5827fde25954cd6026fede7574cc54dc4643c99d6c6b653d6203f94634 + checksum: 10c0/0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 languageName: node linkType: hard @@ -10534,14 +10729,14 @@ __metadata: resolution: "minimatch@npm:5.1.6" dependencies: brace-expansion: "npm:^2.0.1" - checksum: 126b36485b821daf96d33b5c821dac600cc1ab36c87e7a532594f9b1652b1fa89a1eebcaad4dff17c764dce1a7ac1531327f190fed5f97d8f6e5f889c116c429 + checksum: 10c0/3defdfd230914f22a8da203747c42ee3c405c39d4d37ffda284dac5e45b7e1f6c49aa8be606509002898e73091ff2a3bbfc59c2c6c71d4660609f63aa92f98e3 languageName: node linkType: hard "minimist@npm:^1.2.0, minimist@npm:^1.2.5, minimist@npm:^1.2.6": version: 1.2.8 resolution: "minimist@npm:1.2.8" - checksum: 908491b6cc15a6c440ba5b22780a0ba89b9810e1aea684e253e43c4e3b8d56ec1dcdd7ea96dde119c29df59c936cde16062159eae4225c691e19c70b432b6e6f + checksum: 10c0/19d3fcdca050087b84c2029841a093691a91259a47def2f18222f41e7645a0b7c44ef4b40e88a1e58a40c84d2ef0ee6047c55594d298146d0eb3f6b737c20ce6 languageName: node linkType: hard @@ -10550,7 +10745,7 @@ __metadata: resolution: "minipass-collect@npm:2.0.1" dependencies: minipass: "npm:^7.0.3" - checksum: b251bceea62090f67a6cced7a446a36f4cd61ee2d5cea9aee7fff79ba8030e416327a1c5aa2908dc22629d06214b46d88fdab8c51ac76bacbf5703851b5ad342 + checksum: 
10c0/5167e73f62bb74cc5019594709c77e6a742051a647fe9499abf03c71dca75515b7959d67a764bdc4f8b361cf897fbf25e2d9869ee039203ed45240f48b9aa06e languageName: node linkType: hard @@ -10565,7 +10760,7 @@ __metadata: dependenciesMeta: encoding: optional: true - checksum: 3edf72b900e30598567eafe96c30374432a8709e61bb06b87198fa3192d466777e2ec21c52985a0999044fa6567bd6f04651585983a1cbb27e2c1770a07ed2a2 + checksum: 10c0/1b63c1f3313e88eeac4689f1b71c9f086598db9a189400e3ee960c32ed89e06737fa23976c9305c2d57464fb3fcdc12749d3378805c9d6176f5569b0d0ee8a75 languageName: node linkType: hard @@ -10574,7 +10769,7 @@ __metadata: resolution: "minipass-flush@npm:1.0.5" dependencies: minipass: "npm:^3.0.0" - checksum: 56269a0b22bad756a08a94b1ffc36b7c9c5de0735a4dd1ab2b06c066d795cfd1f0ac44a0fcae13eece5589b908ecddc867f04c745c7009be0b566421ea0944cf + checksum: 10c0/2a51b63feb799d2bb34669205eee7c0eaf9dce01883261a5b77410c9408aa447e478efd191b4de6fc1101e796ff5892f8443ef20d9544385819093dbb32d36bd languageName: node linkType: hard @@ -10583,7 +10778,7 @@ __metadata: resolution: "minipass-pipeline@npm:1.2.4" dependencies: minipass: "npm:^3.0.0" - checksum: b14240dac0d29823c3d5911c286069e36d0b81173d7bdf07a7e4a91ecdef92cdff4baaf31ea3746f1c61e0957f652e641223970870e2353593f382112257971b + checksum: 10c0/cbda57cea20b140b797505dc2cac71581a70b3247b84480c1fed5ca5ba46c25ecc25f68bfc9e6dcb1a6e9017dab5c7ada5eab73ad4f0a49d84e35093e0c643f2 languageName: node linkType: hard @@ -10592,7 +10787,7 @@ __metadata: resolution: "minipass-sized@npm:1.0.3" dependencies: minipass: "npm:^3.0.0" - checksum: 40982d8d836a52b0f37049a0a7e5d0f089637298e6d9b45df9c115d4f0520682a78258905e5c8b180fb41b593b0a82cc1361d2c74b45f7ada66334f84d1ecfdd + checksum: 10c0/298f124753efdc745cfe0f2bdfdd81ba25b9f4e753ca4a2066eb17c821f25d48acea607dfc997633ee5bf7b6dfffb4eee4f2051eb168663f0b99fad2fa4829cb languageName: node linkType: hard @@ -10601,21 +10796,21 @@ __metadata: resolution: "minipass@npm:3.3.6" dependencies: yallist: "npm:^4.0.0" - checksum: 
a5c6ef069f70d9a524d3428af39f2b117ff8cd84172e19b754e7264a33df460873e6eb3d6e55758531580970de50ae950c496256bb4ad3691a2974cddff189f0 + checksum: 10c0/a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c languageName: node linkType: hard "minipass@npm:^5.0.0": version: 5.0.0 resolution: "minipass@npm:5.0.0" - checksum: 61682162d29f45d3152b78b08bab7fb32ca10899bc5991ffe98afc18c9e9543bd1e3be94f8b8373ba6262497db63607079dc242ea62e43e7b2270837b7347c93 + checksum: 10c0/a91d8043f691796a8ac88df039da19933ef0f633e3d7f0d35dcd5373af49131cf2399bfc355f41515dc495e3990369c3858cd319e5c2722b4753c90bf3152462 languageName: node linkType: hard "minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3": version: 7.0.4 resolution: "minipass@npm:7.0.4" - checksum: e864bd02ceb5e0707696d58f7ce3a0b89233f0d686ef0d447a66db705c0846a8dc6f34865cd85256c1472ff623665f616b90b8ff58058b2ad996c5de747d2d18 + checksum: 10c0/6c7370a6dfd257bf18222da581ba89a5eaedca10e158781232a8b5542a90547540b4b9b7e7f490e4cda43acfbd12e086f0453728ecf8c19e0ef6921bc5958ac5 languageName: node linkType: hard @@ -10625,14 +10820,14 @@ __metadata: dependencies: minipass: "npm:^3.0.0" yallist: "npm:^4.0.0" - checksum: ae0f45436fb51344dcb87938446a32fbebb540d0e191d63b35e1c773d47512e17307bf54aa88326cc6d176594d00e4423563a091f7266c2f9a6872cdc1e234d1 + checksum: 10c0/64fae024e1a7d0346a1102bb670085b17b7f95bf6cfdf5b128772ec8faf9ea211464ea4add406a3a6384a7d87a0cd1a96263692134323477b4fb43659a6cab78 languageName: node linkType: hard "mkdirp-classic@npm:^0.5.2": version: 0.5.3 resolution: "mkdirp-classic@npm:0.5.3" - checksum: 3f4e088208270bbcc148d53b73e9a5bd9eef05ad2cbf3b3d0ff8795278d50dd1d11a8ef1875ff5aea3fa888931f95bfcb2ad5b7c1061cfefd6284d199e6776ac + checksum: 10c0/95371d831d196960ddc3833cc6907e6b8f67ac5501a6582f47dfae5eb0f092e9f8ce88e0d83afcae95d6e2b61a01741ba03714eeafb6f7a6e9dcc158ac85b168 languageName: node linkType: hard @@ -10643,7 +10838,7 
@@ __metadata: minimist: "npm:^1.2.6" bin: mkdirp: bin/cmd.js - checksum: 0c91b721bb12c3f9af4b77ebf73604baf350e64d80df91754dc509491ae93bf238581e59c7188360cec7cb62fc4100959245a42cfe01834efedc5e9d068376c2 + checksum: 10c0/e2e2be789218807b58abced04e7b49851d9e46e88a2f9539242cc8a92c9b5c3a0b9bab360bd3014e02a140fc4fbc58e31176c408b493f8a2a6f4986bd7527b01 languageName: node linkType: hard @@ -10652,28 +10847,28 @@ __metadata: resolution: "mkdirp@npm:1.0.4" bin: mkdirp: bin/cmd.js - checksum: d71b8dcd4b5af2fe13ecf3bd24070263489404fe216488c5ba7e38ece1f54daf219e72a833a3a2dc404331e870e9f44963a33399589490956bff003a3404d3b2 + checksum: 10c0/46ea0f3ffa8bc6a5bc0c7081ffc3907777f0ed6516888d40a518c5111f8366d97d2678911ad1a6882bf592fa9de6c784fea32e1687bb94e1f4944170af48a5cf languageName: node linkType: hard "ms@npm:2.0.0": version: 2.0.0 resolution: "ms@npm:2.0.0" - checksum: 0e6a22b8b746d2e0b65a430519934fefd41b6db0682e3477c10f60c76e947c4c0ad06f63ffdf1d78d335f83edee8c0aa928aa66a36c7cd95b69b26f468d527f4 + checksum: 10c0/f8fda810b39fd7255bbdc451c46286e549794fcc700dc9cd1d25658bbc4dc2563a5de6fe7c60f798a16a60c6ceb53f033cb353f493f0cf63e5199b702943159d languageName: node linkType: hard "ms@npm:2.1.2": version: 2.1.2 resolution: "ms@npm:2.1.2" - checksum: 673cdb2c3133eb050c745908d8ce632ed2c02d85640e2edb3ace856a2266a813b30c613569bf3354fdf4ea7d1a1494add3bfa95e2713baa27d0c2c71fc44f58f + checksum: 10c0/a437714e2f90dbf881b5191d35a6db792efbca5badf112f87b9e1c712aace4b4b9b742dd6537f3edf90fd6f684de897cec230abde57e87883766712ddda297cc languageName: node linkType: hard -"ms@npm:2.1.3, ms@npm:^2.1.1": +"ms@npm:2.1.3, ms@npm:^2.1.1, ms@npm:^2.1.3": version: 2.1.3 resolution: "ms@npm:2.1.3" - checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d + checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48 languageName: node linkType: hard @@ 
-10682,7 +10877,7 @@ __metadata: resolution: "mustache@npm:4.2.0" bin: mustache: bin/mustache - checksum: 6e668bd5803255ab0779c3983b9412b5c4f4f90e822230e0e8f414f5449ed7a137eed29430e835aa689886f663385cfe05f808eb34b16e1f3a95525889b05cd3 + checksum: 10c0/1f8197e8a19e63645a786581d58c41df7853da26702dbc005193e2437c98ca49b255345c173d50c08fe4b4dbb363e53cb655ecc570791f8deb09887248dd34a2 languageName: node linkType: hard @@ -10691,28 +10886,28 @@ __metadata: resolution: "nanoid@npm:3.3.7" bin: nanoid: bin/nanoid.cjs - checksum: ac1eb60f615b272bccb0e2b9cd933720dad30bf9708424f691b8113826bb91aca7e9d14ef5d9415a6ba15c266b37817256f58d8ce980c82b0ba3185352565679 + checksum: 10c0/e3fb661aa083454f40500473bb69eedb85dc160e763150b9a2c567c7e9ff560ce028a9f833123b618a6ea742e311138b591910e795614a629029e86e180660f3 languageName: node linkType: hard "natural-compare@npm:^1.4.0": version: 1.4.0 resolution: "natural-compare@npm:1.4.0" - checksum: 23ad088b08f898fc9b53011d7bb78ec48e79de7627e01ab5518e806033861bef68d5b0cd0e2205c2f36690ac9571ff6bcb05eb777ced2eeda8d4ac5b44592c3d + checksum: 10c0/f5f9a7974bfb28a91afafa254b197f0f22c684d4a1731763dda960d2c8e375b36c7d690e0d9dc8fba774c537af14a7e979129bca23d88d052fbeb9466955e447 languageName: node linkType: hard "negotiator@npm:0.6.3, negotiator@npm:^0.6.3": version: 0.6.3 resolution: "negotiator@npm:0.6.3" - checksum: 2723fb822a17ad55c93a588a4bc44d53b22855bf4be5499916ca0cab1e7165409d0b288ba2577d7b029f10ce18cf2ed8e703e5af31c984e1e2304277ef979837 + checksum: 10c0/3ec9fd413e7bf071c937ae60d572bc67155262068ed522cf4b3be5edbe6ddf67d095ec03a3a14ebf8fc8e95f8e1d61be4869db0dbb0de696f6b837358bd43fc2 languageName: node linkType: hard "neo-async@npm:^2.5.0, neo-async@npm:^2.6.2": version: 2.6.2 resolution: "neo-async@npm:2.6.2" - checksum: 1a7948fea86f2b33ec766bc899c88796a51ba76a4afc9026764aedc6e7cde692a09067031e4a1bf6db4f978ccd99e7f5b6c03fe47ad9865c3d4f99050d67e002 + checksum: 
10c0/c2f5a604a54a8ec5438a342e1f356dff4bc33ccccdb6dc668d94fe8e5eccfc9d2c2eea6064b0967a767ba63b33763f51ccf2cd2441b461a7322656c1f06b3f5d languageName: node linkType: hard @@ -10721,14 +10916,14 @@ __metadata: resolution: "node-dir@npm:0.1.17" dependencies: minimatch: "npm:^3.0.2" - checksum: 281fdea12d9c080a7250e5b5afefa3ab39426d40753ec8126a2d1e67f189b8824723abfed74f5d8549c5d78352d8c489fe08d0b067d7684c87c07283d38374a5 + checksum: 10c0/16222e871708c405079ff8122d4a7e1d522c5b90fc8f12b3112140af871cfc70128c376e845dcd0044c625db0d2efebd2d852414599d240564db61d53402b4c1 languageName: node linkType: hard "node-fetch-native@npm:^1.6.1": version: 1.6.2 resolution: "node-fetch-native@npm:1.6.2" - checksum: 85a3c8fb853d2abbd7e4235742ee0ff5d8ac15f982209989f7150407203dc65ad45e0c11a0f7416c3685e3cdd3d3f9ee2922e7558f201dd6a7e9c9dde3b612fd + checksum: 10c0/2c1e94ce6e5b8a8ca85d5cdb837bc098ba2a54dea07e3509250288bebca8147950e1bef10db30120b389263ec0064e0562effdd085bb49d4e2046ebd963ee98d languageName: node linkType: hard @@ -10742,7 +10937,7 @@ __metadata: peerDependenciesMeta: encoding: optional: true - checksum: b24f8a3dc937f388192e59bcf9d0857d7b6940a2496f328381641cb616efccc9866e89ec43f2ec956bbd6c3d3ee05524ce77fe7b29ccd34692b3a16f237d6676 + checksum: 10c0/b55786b6028208e6fbe594ccccc213cab67a72899c9234eb59dba51062a299ea853210fcf526998eaa2867b0963ad72338824450905679ff0fa304b8c5093ae8 languageName: node linkType: hard @@ -10762,21 +10957,21 @@ __metadata: which: "npm:^4.0.0" bin: node-gyp: bin/node-gyp.js - checksum: 578cf0c821f258ce4b6ebce4461eca4c991a4df2dee163c0624f2fe09c7d6d37240be4942285a0048d307230248ee0b18382d6623b9a0136ce9533486deddfa8 + checksum: 10c0/abddfff7d873312e4ed4a5fb75ce893a5c4fb69e7fcb1dfa71c28a6b92a7f1ef6b62790dffb39181b5a82728ba8f2f32d229cf8cbe66769fe02cea7db4a555aa languageName: node linkType: hard "node-int64@npm:^0.4.0": version: 0.4.0 resolution: "node-int64@npm:0.4.0" - checksum: 
b7afc2b65e56f7035b1a2eec57ae0fbdee7d742b1cdcd0f4387562b6527a011ab1cbe9f64cc8b3cca61e3297c9637c8bf61cec2e6b8d3a711d4b5267dfafbe02 + checksum: 10c0/a6a4d8369e2f2720e9c645255ffde909c0fbd41c92ea92a5607fc17055955daac99c1ff589d421eee12a0d24e99f7bfc2aabfeb1a4c14742f6c099a51863f31a languageName: node linkType: hard "node-releases@npm:^2.0.14": version: 2.0.14 resolution: "node-releases@npm:2.0.14" - checksum: 0f7607ec7db5ef1dc616899a5f24ae90c869b6a54c2d4f36ff6d84a282ab9343c7ff3ca3670fe4669171bb1e8a9b3e286e1ef1c131f09a83d70554f855d54f24 + checksum: 10c0/199fc93773ae70ec9969bc6d5ac5b2bbd6eb986ed1907d751f411fef3ede0e4bfdb45ceb43711f8078bea237b6036db8b1bf208f6ff2b70c7d615afd157f3ab9 languageName: node linkType: hard @@ -10787,7 +10982,7 @@ __metadata: abbrev: "npm:^2.0.0" bin: nopt: bin/nopt.js - checksum: 1e7489f17cbda452c8acaf596a8defb4ae477d2a9953b76eb96f4ec3f62c6b421cd5174eaa742f88279871fde9586d8a1d38fb3f53fa0c405585453be31dff4c + checksum: 10c0/9bd7198df6f16eb29ff16892c77bcf7f0cc41f9fb5c26280ac0def2cf8cf319f3b821b3af83eba0e74c85807cc430a16efe0db58fe6ae1f41e69519f585b6aff languageName: node linkType: hard @@ -10799,14 +10994,14 @@ __metadata: resolve: "npm:^1.10.0" semver: "npm:2 || 3 || 4 || 5" validate-npm-package-license: "npm:^3.0.1" - checksum: 644f830a8bb9b7cc9bf2f6150618727659ee27cdd0840d1c1f97e8e6cab0803a098a2c19f31c6247ad9d3a0792e61521a13a6e8cd87cc6bb676e3150612c03d4 + checksum: 10c0/357cb1646deb42f8eb4c7d42c4edf0eec312f3628c2ef98501963cc4bbe7277021b2b1d977f982b2edce78f5a1014613ce9cf38085c3df2d76730481357ca504 languageName: node linkType: hard "normalize-path@npm:^3.0.0, normalize-path@npm:~3.0.0": version: 3.0.0 resolution: "normalize-path@npm:3.0.0" - checksum: 88eeb4da891e10b1318c4b2476b6e2ecbeb5ff97d946815ffea7794c31a89017c70d7f34b3c2ebf23ef4e9fc9fb99f7dffe36da22011b5b5c6ffa34f4873ec20 + checksum: 10c0/e008c8142bcc335b5e38cf0d63cfd39d6cf2d97480af9abdbe9a439221fd4d749763bab492a8ee708ce7a194bb00c9da6d0a115018672310850489137b3da046 languageName: node linkType: 
hard @@ -10815,7 +11010,7 @@ __metadata: resolution: "npm-run-path@npm:4.0.1" dependencies: path-key: "npm:^3.0.0" - checksum: 5374c0cea4b0bbfdfae62da7bbdf1e1558d338335f4cacf2515c282ff358ff27b2ecb91ffa5330a8b14390ac66a1e146e10700440c1ab868208430f56b5f4d23 + checksum: 10c0/6f9353a95288f8455cf64cbeb707b28826a7f29690244c1e4bb61ec573256e021b6ad6651b394eb1ccfd00d6ec50147253aba2c5fe58a57ceb111fad62c519ac languageName: node linkType: hard @@ -10824,7 +11019,7 @@ __metadata: resolution: "npm-run-path@npm:5.3.0" dependencies: path-key: "npm:^4.0.0" - checksum: ae8e7a89da9594fb9c308f6555c73f618152340dcaae423e5fb3620026fefbec463618a8b761920382d666fa7a2d8d240b6fe320e8a6cdd54dc3687e2b659d25 + checksum: 10c0/124df74820c40c2eb9a8612a254ea1d557ddfab1581c3e751f825e3e366d9f00b0d76a3c94ecd8398e7f3eee193018622677e95816e8491f0797b21e30b2deba languageName: node linkType: hard @@ -10833,14 +11028,14 @@ __metadata: resolution: "nth-check@npm:2.1.1" dependencies: boolbase: "npm:^1.0.0" - checksum: 5afc3dafcd1573b08877ca8e6148c52abd565f1d06b1eb08caf982e3fa289a82f2cae697ffb55b5021e146d60443f1590a5d6b944844e944714a5b549675bcd3 + checksum: 10c0/5fee7ff309727763689cfad844d979aedd2204a817fbaaf0e1603794a7c20db28548d7b024692f953557df6ce4a0ee4ae46cd8ebd9b36cfb300b9226b567c479 languageName: node linkType: hard "nwsapi@npm:^2.2.2": version: 2.2.7 resolution: "nwsapi@npm:2.2.7" - checksum: 22c002080f0297121ad138aba5a6509e724774d6701fe2c4777627bd939064ecd9e1b6dc1c2c716bb7ca0b9f16247892ff2f664285202ac7eff6ec9543725320 + checksum: 10c0/44be198adae99208487a1c886c0a3712264f7bbafa44368ad96c003512fed2753d4e22890ca1e6edb2690c3456a169f2a3c33bfacde1905cf3bf01c7722464db languageName: node linkType: hard @@ -10854,21 +11049,21 @@ __metadata: ufo: "npm:^1.3.2" bin: nypm: dist/cli.mjs - checksum: 1cdd1f9476bbd66ffe97de4afadfdf53dc4b273a1be8b49f7cb52470c5331c67bc0dee4952498b96af752831cbd4051fe5ca376e83401b440786ab1f6ef93837 + checksum: 
10c0/addc0a0f2eaf33a245972332d2fd8317e8e05051e470138e64398aac15553aff0051cc9424ddb706ec00594cab22b592025a2601d781869f6c98227a0ba32e5d languageName: node linkType: hard "object-assign@npm:^4.1.1": version: 4.1.1 resolution: "object-assign@npm:4.1.1" - checksum: fcc6e4ea8c7fe48abfbb552578b1c53e0d194086e2e6bbbf59e0a536381a292f39943c6e9628af05b5528aa5e3318bb30d6b2e53cadaf5b8fe9e12c4b69af23f + checksum: 10c0/1f4df9945120325d041ccf7b86f31e8bcc14e73d29171e37a7903050e96b81323784ec59f93f102ec635bcf6fa8034ba3ea0a8c7e69fa202b87ae3b6cec5a414 languageName: node linkType: hard "object-inspect@npm:^1.13.1": version: 1.13.1 resolution: "object-inspect@npm:1.13.1" - checksum: 92f4989ed83422d56431bc39656d4c780348eb15d397ce352ade6b7fec08f973b53744bd41b94af021901e61acaf78fcc19e65bf464ecc0df958586a672700f0 + checksum: 10c0/fad603f408e345c82e946abdf4bfd774260a5ed3e5997a0b057c44153ac32c7271ff19e3a5ae39c858da683ba045ccac2f65245c12763ce4e8594f818f4a648d languageName: node linkType: hard @@ -10878,14 +11073,14 @@ __metadata: dependencies: call-bind: "npm:^1.0.7" define-properties: "npm:^1.2.1" - checksum: 4f6f544773a595da21c69a7531e0e1d6250670f4e09c55f47eb02c516035cfcb1b46ceb744edfd3ecb362309dbccb6d7f88e43bf42e4d4595ac10a329061053a + checksum: 10c0/506af444c4dce7f8e31f34fc549e2fb8152d6b9c4a30c6e62852badd7f520b579c679af433e7a072f9d78eb7808d230dc12e1cf58da9154dfbf8813099ea0fe0 languageName: node linkType: hard "object-keys@npm:^1.1.1": version: 1.1.1 resolution: "object-keys@npm:1.1.1" - checksum: 3d81d02674115973df0b7117628ea4110d56042e5326413e4b4313f0bcdf7dd78d4a3acef2c831463fa3796a66762c49daef306f4a0ea1af44877d7086d73bde + checksum: 10c0/b11f7ccdbc6d406d1f186cdadb9d54738e347b2692a14439ca5ac70c225fa6db46db809711b78589866d47b25fc3e8dee0b4c722ac751e11180f9380e3d8601d languageName: node linkType: hard @@ -10897,7 +11092,7 @@ __metadata: define-properties: "npm:^1.2.1" has-symbols: "npm:^1.0.3" object-keys: "npm:^1.1.1" - checksum: 
dbb22da4cda82e1658349ea62b80815f587b47131b3dd7a4ab7f84190ab31d206bbd8fe7e26ae3220c55b65725ac4529825f6142154211220302aa6b1518045d + checksum: 10c0/60108e1fa2706f22554a4648299b0955236c62b3685c52abf4988d14fffb0e7731e00aa8c6448397e3eb63d087dcc124a9f21e1980f36d0b2667f3c18bacd469 languageName: node linkType: hard @@ -10908,7 +11103,7 @@ __metadata: call-bind: "npm:^1.0.2" define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 03f0bd0f23a8626c94429d15abf26ccda7723f08cd26be2c09c72d436765f8c7468605b5476ca58d4a7cec1ec7eca5be496dbd938fd4236b77ed6d05a8680048 + checksum: 10c0/3ad1899cc7bf14546bf28f4a9b363ae8690b90948fcfbcac4c808395435d760f26193d9cae95337ce0e3c1e5c1f4fa45f7b46b31b68d389e9e117fce38775d86 languageName: node linkType: hard @@ -10919,7 +11114,7 @@ __metadata: call-bind: "npm:^1.0.2" define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 1bfbe42a51f8d84e417d193fae78e4b8eebb134514cdd44406480f8e8a0e075071e0717635d8e3eccd50fec08c1d555fe505c38804cbac0808397187653edd59 + checksum: 10c0/071745c21f6fc9e6c914691f2532c1fb60ad967e5ddc52801d09958b5de926566299d07ae14466452a7efd29015f9145d6c09c573d93a0dc6f1683ee0ec2b93b languageName: node linkType: hard @@ -10932,7 +11127,7 @@ __metadata: define-properties: "npm:^1.2.1" es-abstract: "npm:^1.22.3" es-errors: "npm:^1.0.0" - checksum: 07c1bea1772c45f7967a63358a683ef7b0bd99cabe0563e6fee3e8acc061cc5984d2f01a46472ebf10b2cb439298c46776b2134550dce457fd7240baaaa4f592 + checksum: 10c0/b6266b1cfec7eb784b8bbe0bca5dc4b371cf9dd3e601b0897d72fa97a5934273d8fb05b3fc5222204104dbec32b50e25ba27e05ad681f71fb739cc1c7e9b81b1 languageName: node linkType: hard @@ -10942,7 +11137,7 @@ __metadata: dependencies: define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 735679729c25a4e0d3713adf5df9861d862f0453e87ada4d991b75cd4225365dec61a08435e1127f42c9cc1adfc8e952fa5dca75364ebda6539dadf4721dc9c4 + checksum: 
10c0/8a41ba4fb1208a85c2275e9b5098071beacc24345b9a71ab98ef0a1c61b34dc74c6b460ff1e1884c33843d8f2553df64a10eec2b74b3ed009e3b2710c826bd2c languageName: node linkType: hard @@ -10953,14 +11148,14 @@ __metadata: call-bind: "npm:^1.0.2" define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 20ab42c0bbf984405c80e060114b18cf5d629a40a132c7eac4fb79c5d06deb97496311c19297dcf9c61f45c2539cd4c7f7c5d6230e51db360ff297bbc9910162 + checksum: 10c0/e869d6a37fb7afdd0054dea49036d6ccebb84854a8848a093bbd1bc516f53e690bba88f0bc3e83fdfa74c601469ee6989c9b13359cda9604144c6e732fad3b6b languageName: node linkType: hard "ohash@npm:^1.1.3": version: 1.1.3 resolution: "ohash@npm:1.1.3" - checksum: 80a3528285f61588600c8c4f091a67f55fbc141f4eec4b3c30182468053042eef5a9684780e963f98a71ec068f3de56d42920c6417bf8f79ab14aeb75ac0bb39 + checksum: 10c0/928f5bdbd8cd73f90cf544c0533dbda8e0a42d9b8c7454ab89e64e4d11bc85f85242830b4e107426ce13dc4dd3013286f8f5e0c84abd8942a014b907d9692540 languageName: node linkType: hard @@ -10969,14 +11164,14 @@ __metadata: resolution: "on-finished@npm:2.4.1" dependencies: ee-first: "npm:1.1.1" - checksum: 8e81472c5028125c8c39044ac4ab8ba51a7cdc19a9fbd4710f5d524a74c6d8c9ded4dd0eed83f28d3d33ac1d7a6a439ba948ccb765ac6ce87f30450a26bfe2ea + checksum: 10c0/46fb11b9063782f2d9968863d9cbba33d77aa13c17f895f56129c274318b86500b22af3a160fe9995aa41317efcd22941b6eba747f718ced08d9a73afdb087b4 languageName: node linkType: hard "on-headers@npm:~1.0.2": version: 1.0.2 resolution: "on-headers@npm:1.0.2" - checksum: 870766c16345855e2012e9422ba1ab110c7e44ad5891a67790f84610bd70a72b67fdd71baf497295f1d1bf38dd4c92248f825d48729c53c0eae5262fb69fa171 + checksum: 10c0/f649e65c197bf31505a4c0444875db0258e198292f34b884d73c2f751e91792ef96bb5cf89aa0f4fecc2e4dc662461dda606b1274b0e564f539cae5d2f5fc32f languageName: node linkType: hard @@ -10985,7 +11180,7 @@ __metadata: resolution: "once@npm:1.4.0" dependencies: wrappy: "npm:1" - checksum: 
cd0a88501333edd640d95f0d2700fbde6bff20b3d4d9bdc521bdd31af0656b5706570d6c6afe532045a20bb8dc0849f8332d6f2a416e0ba6d3d3b98806c7db68 + checksum: 10c0/5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 languageName: node linkType: hard @@ -10994,7 +11189,7 @@ __metadata: resolution: "one-time@npm:1.0.0" dependencies: fn.name: "npm:1.x.x" - checksum: 64d0160480eeae4e3b2a6fc0a02f452e05bb0cc8373a4ed56a4fc08c3939dcb91bc20075003ed499655bd16919feb63ca56f86eee7932c5251f7d629b55dfc90 + checksum: 10c0/6e4887b331edbb954f4e915831cbec0a7b9956c36f4feb5f6de98c448ac02ff881fd8d9b55a6b1b55030af184c6b648f340a76eb211812f4ad8c9b4b8692fdaa languageName: node linkType: hard @@ -11003,7 +11198,7 @@ __metadata: resolution: "onetime@npm:5.1.2" dependencies: mimic-fn: "npm:^2.1.0" - checksum: e9fd0695a01cf226652f0385bf16b7a24153dbbb2039f764c8ba6d2306a8506b0e4ce570de6ad99c7a6eb49520743afdb66edd95ee979c1a342554ed49a9aadd + checksum: 10c0/ffcef6fbb2692c3c40749f31ea2e22677a876daea92959b8a80b521d95cca7a668c884d8b2045d1d8ee7d56796aa405c405462af112a1477594cc63531baeb8f languageName: node linkType: hard @@ -11012,7 +11207,16 @@ __metadata: resolution: "onetime@npm:6.0.0" dependencies: mimic-fn: "npm:^4.0.0" - checksum: 0846ce78e440841335d4e9182ef69d5762e9f38aa7499b19f42ea1c4cd40f0b4446094c455c713f9adac3f4ae86f613bb5e30c99e52652764d06a89f709b3788 + checksum: 10c0/4eef7c6abfef697dd4479345a4100c382d73c149d2d56170a54a07418c50816937ad09500e1ed1e79d235989d073a9bade8557122aee24f0576ecde0f392bb6c + languageName: node + linkType: hard + +"onetime@npm:^7.0.0": + version: 7.0.0 + resolution: "onetime@npm:7.0.0" + dependencies: + mimic-function: "npm:^5.0.0" + checksum: 10c0/5cb9179d74b63f52a196a2e7037ba2b9a893245a5532d3f44360012005c9cadb60851d56716ebff18a6f47129dab7168022445df47c2aff3b276d92585ed1221 languageName: node linkType: hard @@ -11023,7 +11227,7 @@ __metadata: define-lazy-prop: "npm:^2.0.0" is-docker: "npm:^2.1.1" is-wsl: 
"npm:^2.2.0" - checksum: acd81a1d19879c818acb3af2d2e8e9d81d17b5367561e623248133deb7dd3aefaed527531df2677d3e6aaf0199f84df57b6b2262babff8bf46ea0029aac536c9 + checksum: 10c0/bb6b3a58401dacdb0aad14360626faf3fb7fba4b77816b373495988b724fb48941cad80c1b65d62bb31a17609b2cd91c41a181602caea597ca80dfbcc27e84c9 languageName: node linkType: hard @@ -11037,7 +11241,7 @@ __metadata: levn: "npm:^0.4.1" prelude-ls: "npm:^1.2.1" type-check: "npm:^0.4.0" - checksum: fa28d3016395974f7fc087d6bbf0ac7f58ac3489f4f202a377e9c194969f329a7b88c75f8152b33fb08794a30dcd5c079db6bb465c28151357f113d80bbf67da + checksum: 10c0/66fba794d425b5be51353035cf3167ce6cfa049059cbb93229b819167687e0f48d2bc4603fcb21b091c99acb516aae1083624675b15c4765b2e4693a085e959c languageName: node linkType: hard @@ -11054,7 +11258,7 @@ __metadata: log-symbols: "npm:^4.1.0" strip-ansi: "npm:^6.0.0" wcwidth: "npm:^1.0.1" - checksum: 8d071828f40090a8e1c6e8f350c6eb065808e9ab2b3e57fa37e0d5ae78cb46dac00117c8f12c3c8b8da2923454afbd8265e08c10b69881170c5b269f451e7fef + checksum: 10c0/10ff14aace236d0e2f044193362b22edce4784add08b779eccc8f8ef97195cae1248db8ec1ec5f5ff076f91acbe573f5f42a98c19b78dba8c54eefff983cae85 languageName: node linkType: hard @@ -11063,7 +11267,7 @@ __metadata: resolution: "p-limit@npm:2.3.0" dependencies: p-try: "npm:^2.0.0" - checksum: 84ff17f1a38126c3314e91ecfe56aecbf36430940e2873dadaa773ffe072dc23b7af8e46d4b6485d302a11673fe94c6b67ca2cfbb60c989848b02100d0594ac1 + checksum: 10c0/8da01ac53efe6a627080fafc127c873da40c18d87b3f5d5492d465bb85ec7207e153948df6b9cbaeb130be70152f874229b8242ee2be84c0794082510af97f12 languageName: node linkType: hard @@ -11072,7 +11276,7 @@ __metadata: resolution: "p-limit@npm:3.1.0" dependencies: yocto-queue: "npm:^0.1.0" - checksum: 7c3690c4dbf62ef625671e20b7bdf1cbc9534e83352a2780f165b0d3ceba21907e77ad63401708145ca4e25bfc51636588d89a8c0aeb715e6c37d1c066430360 + checksum: 
10c0/9db675949dbdc9c3763c89e748d0ef8bdad0afbb24d49ceaf4c46c02c77d30db4e0652ed36d0a0a7a95154335fab810d95c86153105bb73b3a90448e2bb14e1a languageName: node linkType: hard @@ -11081,7 +11285,7 @@ __metadata: resolution: "p-locate@npm:3.0.0" dependencies: p-limit: "npm:^2.0.0" - checksum: 83991734a9854a05fe9dbb29f707ea8a0599391f52daac32b86f08e21415e857ffa60f0e120bfe7ce0cc4faf9274a50239c7895fc0d0579d08411e513b83a4ae + checksum: 10c0/7b7f06f718f19e989ce6280ed4396fb3c34dabdee0df948376483032f9d5ec22fdf7077ec942143a75827bb85b11da72016497fc10dac1106c837ed593969ee8 languageName: node linkType: hard @@ -11090,7 +11294,7 @@ __metadata: resolution: "p-locate@npm:4.1.0" dependencies: p-limit: "npm:^2.2.0" - checksum: 513bd14a455f5da4ebfcb819ef706c54adb09097703de6aeaa5d26fe5ea16df92b48d1ac45e01e3944ce1e6aa2a66f7f8894742b8c9d6e276e16cd2049a2b870 + checksum: 10c0/1b476ad69ad7f6059744f343b26d51ce091508935c1dbb80c4e0a2f397ffce0ca3a1f9f5cd3c7ce19d7929a09719d5c65fe70d8ee289c3f267cd36f2881813e9 languageName: node linkType: hard @@ -11099,7 +11303,7 @@ __metadata: resolution: "p-locate@npm:5.0.0" dependencies: p-limit: "npm:^3.0.2" - checksum: 1623088f36cf1cbca58e9b61c4e62bf0c60a07af5ae1ca99a720837356b5b6c5ba3eb1b2127e47a06865fee59dd0453cad7cc844cda9d5a62ac1a5a51b7c86d3 + checksum: 10c0/2290d627ab7903b8b70d11d384fee714b797f6040d9278932754a6860845c4d3190603a0772a663c8cb5a7b21d1b16acb3a6487ebcafa9773094edc3dfe6009a languageName: node linkType: hard @@ -11108,21 +11312,21 @@ __metadata: resolution: "p-map@npm:4.0.0" dependencies: aggregate-error: "npm:^3.0.0" - checksum: 7ba4a2b1e24c05e1fc14bbaea0fc6d85cf005ae7e9c9425d4575550f37e2e584b1af97bcde78eacd7559208f20995988d52881334db16cf77bc1bcf68e48ed7c + checksum: 10c0/592c05bd6262c466ce269ff172bb8de7c6975afca9b50c975135b974e9bdaafbfe80e61aaaf5be6d1200ba08b30ead04b88cfa7e25ff1e3b93ab28c9f62a2c75 languageName: node linkType: hard "p-try@npm:^2.0.0": version: 2.2.0 resolution: "p-try@npm:2.2.0" - checksum: 
f8a8e9a7693659383f06aec604ad5ead237c7a261c18048a6e1b5b85a5f8a067e469aa24f5bc009b991ea3b058a87f5065ef4176793a200d4917349881216cae + checksum: 10c0/c36c19907734c904b16994e6535b02c36c2224d433e01a2f1ab777237f4d86e6289fd5fd464850491e940379d4606ed850c03e0f9ab600b0ebddb511312e177f languageName: node linkType: hard "pako@npm:~0.2.0": version: 0.2.9 resolution: "pako@npm:0.2.9" - checksum: 627c6842e90af0b3a9ee47345bd66485a589aff9514266f4fa9318557ad819c46fedf97510f2cef9b6224c57913777966a05cb46caf6a9b31177a5401a06fe15 + checksum: 10c0/79c1806ebcf325b60ae599e4d7227c2e346d7b829dc20f5cf24cef07c934079dc3a61c5b3c8278a2f7a190c4a613e343ea11e5302dbe252efd11712df4b6b041 languageName: node linkType: hard @@ -11131,7 +11335,7 @@ __metadata: resolution: "parent-module@npm:1.0.1" dependencies: callsites: "npm:^3.0.0" - checksum: 6ba8b255145cae9470cf5551eb74be2d22281587af787a2626683a6c20fbb464978784661478dd2a3f1dad74d1e802d403e1b03c1a31fab310259eec8ac560ff + checksum: 10c0/c63d6e80000d4babd11978e0d3fee386ca7752a02b035fd2435960ffaa7219dc42146f07069fb65e6e8bf1caef89daf9af7535a39bddf354d78bf50d8294f556 languageName: node linkType: hard @@ -11145,7 +11349,7 @@ __metadata: is-alphanumerical: "npm:^1.0.0" is-decimal: "npm:^1.0.0" is-hexadecimal: "npm:^1.0.0" - checksum: feb46b516722474797d72331421f3e62856750cfb4f70ba098b36447bf0b169e819cc4fdee53e022874d5f0c81b605d86e1912b9842a70e59a54de2fee81589d + checksum: 10c0/f85a22c0ea406ff26b53fdc28641f01cc36fa49eb2e3135f02693286c89ef0bcefc2262d99b3688e20aac2a14fd10b75c518583e875c1b9fe3d1f937795e0854 languageName: node linkType: hard @@ -11161,7 +11365,7 @@ __metadata: is-alphanumerical: "npm:^2.0.0" is-decimal: "npm:^2.0.0" is-hexadecimal: "npm:^2.0.0" - checksum: 71314312d2482422fcf0b6675e020643bab424b11f64c654b7843652cae03842a7802eda1fed194ec435debb5db47a33513eb6b1176888e9e998a0368f01f5c8 + checksum: 10c0/9dfa3b0dc43a913c2558c4bd625b1abcc2d6c6b38aa5724b141ed988471977248f7ad234eed57e1bc70b694dd15b0d710a04f66c2f7c096e35abd91962b7d926 languageName: node 
linkType: hard @@ -11173,7 +11377,7 @@ __metadata: error-ex: "npm:^1.3.1" json-parse-even-better-errors: "npm:^2.3.0" lines-and-columns: "npm:^1.1.6" - checksum: 62085b17d64da57f40f6afc2ac1f4d95def18c4323577e1eced571db75d9ab59b297d1d10582920f84b15985cbfc6b6d450ccbf317644cfa176f3ed982ad87e2 + checksum: 10c0/77947f2253005be7a12d858aedbafa09c9ae39eb4863adf330f7b416ca4f4a08132e453e08de2db46459256fb66afaac5ee758b44fe6541b7cdaf9d252e59585 languageName: node linkType: hard @@ -11182,56 +11386,56 @@ __metadata: resolution: "parse5@npm:7.1.2" dependencies: entities: "npm:^4.4.0" - checksum: 3c86806bb0fb1e9a999ff3a4c883b1ca243d99f45a619a0898dbf021a95a0189ed955c31b07fe49d342b54e814f33f2c9d7489198e8630dacd5477d413ec5782 + checksum: 10c0/297d7af8224f4b5cb7f6617ecdae98eeaed7f8cbd78956c42785e230505d5a4f07cef352af10d3006fa5c1544b76b57784d3a22d861ae071bbc460c649482bf4 languageName: node linkType: hard "parseurl@npm:~1.3.3": version: 1.3.3 resolution: "parseurl@npm:1.3.3" - checksum: 407cee8e0a3a4c5cd472559bca8b6a45b82c124e9a4703302326e9ab60fc1081442ada4e02628efef1eb16197ddc7f8822f5a91fd7d7c86b51f530aedb17dfa2 + checksum: 10c0/90dd4760d6f6174adb9f20cf0965ae12e23879b5f5464f38e92fce8073354341e4b3b76fa3d878351efe7d01e617121955284cfd002ab087fba1a0726ec0b4f5 languageName: node linkType: hard "path-exists@npm:^3.0.0": version: 3.0.0 resolution: "path-exists@npm:3.0.0" - checksum: 96e92643aa34b4b28d0de1cd2eba52a1c5313a90c6542d03f62750d82480e20bfa62bc865d5cfc6165f5fcd5aeb0851043c40a39be5989646f223300021bae0a + checksum: 10c0/17d6a5664bc0a11d48e2b2127d28a0e58822c6740bde30403f08013da599182289c56518bec89407e3f31d3c2b6b296a4220bc3f867f0911fee6952208b04167 languageName: node linkType: hard "path-exists@npm:^4.0.0": version: 4.0.0 resolution: "path-exists@npm:4.0.0" - checksum: 505807199dfb7c50737b057dd8d351b82c033029ab94cb10a657609e00c1bc53b951cfdbccab8de04c5584d5eff31128ce6afd3db79281874a5ef2adbba55ed1 + checksum: 
10c0/8c0bd3f5238188197dc78dced15207a4716c51cc4e3624c44fc97acf69558f5ebb9a2afff486fe1b4ee148e0c133e96c5e11a9aa5c48a3006e3467da070e5e1b languageName: node linkType: hard "path-is-absolute@npm:^1.0.0": version: 1.0.1 resolution: "path-is-absolute@npm:1.0.1" - checksum: 060840f92cf8effa293bcc1bea81281bd7d363731d214cbe5c227df207c34cd727430f70c6037b5159c8a870b9157cba65e775446b0ab06fd5ecc7e54615a3b8 + checksum: 10c0/127da03c82172a2a50099cddbf02510c1791fc2cc5f7713ddb613a56838db1e8168b121a920079d052e0936c23005562059756d653b7c544c53185efe53be078 languageName: node linkType: hard "path-key@npm:^3.0.0, path-key@npm:^3.1.0": version: 3.1.1 resolution: "path-key@npm:3.1.1" - checksum: 55cd7a9dd4b343412a8386a743f9c746ef196e57c823d90ca3ab917f90ab9f13dd0ded27252ba49dbdfcab2b091d998bc446f6220cd3cea65db407502a740020 + checksum: 10c0/748c43efd5a569c039d7a00a03b58eecd1d75f3999f5a28303d75f521288df4823bc057d8784eb72358b2895a05f29a070bc9f1f17d28226cc4e62494cc58c4c languageName: node linkType: hard "path-key@npm:^4.0.0": version: 4.0.0 resolution: "path-key@npm:4.0.0" - checksum: 8e6c314ae6d16b83e93032c61020129f6f4484590a777eed709c4a01b50e498822b00f76ceaf94bc64dbd90b327df56ceadce27da3d83393790f1219e07721d7 + checksum: 10c0/794efeef32863a65ac312f3c0b0a99f921f3e827ff63afa5cb09a377e202c262b671f7b3832a4e64731003fa94af0263713962d317b9887bd1e0c48a342efba3 languageName: node linkType: hard "path-parse@npm:^1.0.7": version: 1.0.7 resolution: "path-parse@npm:1.0.7" - checksum: 49abf3d81115642938a8700ec580da6e830dde670be21893c62f4e10bd7dd4c3742ddc603fe24f898cba7eb0c6bc1777f8d9ac14185d34540c6d4d80cd9cae8a + checksum: 10c0/11ce261f9d294cc7a58d6a574b7f1b935842355ec66fba3c3fd79e0f036462eaf07d0aa95bb74ff432f9afef97ce1926c720988c6a7451d8a584930ae7de86e1 languageName: node linkType: hard @@ -11241,28 +11445,28 @@ __metadata: dependencies: lru-cache: "npm:^9.1.1 || ^10.0.0" minipass: "npm:^5.0.0 || ^6.0.2 || ^7.0.0" - checksum: 
eebfb8304fef1d4f7e1486df987e4fd77413de4fce16508dea69fcf8eb318c09a6b15a7a2f4c22877cec1cb7ecbd3071d18ca9de79eeece0df874a00f1f0bdc8 + checksum: 10c0/e5dc78a7348d25eec61ab166317e9e9c7b46818aa2c2b9006c507a6ff48c672d011292d9662527213e558f5652ce0afcc788663a061d8b59ab495681840c0c1e languageName: node linkType: hard "path-to-regexp@npm:0.1.7": version: 0.1.7 resolution: "path-to-regexp@npm:0.1.7" - checksum: 701c99e1f08e3400bea4d701cf6f03517474bb1b608da71c78b1eb261415b645c5670dfae49808c89e12cea2dccd113b069f040a80de012da0400191c6dbd1c8 + checksum: 10c0/50a1ddb1af41a9e68bd67ca8e331a705899d16fb720a1ea3a41e310480948387daf603abb14d7b0826c58f10146d49050a1291ba6a82b78a382d1c02c0b8f905 languageName: node linkType: hard "path-type@npm:^4.0.0": version: 4.0.0 resolution: "path-type@npm:4.0.0" - checksum: 5b1e2daa247062061325b8fdbfd1fb56dde0a448fb1455453276ea18c60685bdad23a445dc148cf87bc216be1573357509b7d4060494a6fd768c7efad833ee45 + checksum: 10c0/666f6973f332f27581371efaf303fd6c272cc43c2057b37aa99e3643158c7e4b2626549555d88626e99ea9e046f82f32e41bbde5f1508547e9a11b149b52387c languageName: node linkType: hard "pathe@npm:^1.1.1, pathe@npm:^1.1.2": version: 1.1.2 resolution: "pathe@npm:1.1.2" - checksum: f201d796351bf7433d147b92c20eb154a4e0ea83512017bf4ec4e492a5d6e738fb45798be4259a61aa81270179fce11026f6ff0d3fa04173041de044defe9d80 + checksum: 10c0/64ee0a4e587fb0f208d9777a6c56e4f9050039268faaaaecd50e959ef01bf847b7872785c36483fa5cdcdbdfdb31fef2ff222684d4fc21c330ab60395c681897 languageName: node linkType: hard @@ -11273,42 +11477,51 @@ __metadata: buffer-from: "npm:^1.0.0" duplexify: "npm:^3.5.0" through2: "npm:^2.0.3" - checksum: a0e09d6d1a8a01158a3334f20d6b1cdd91747eba24eb06a1d742eefb620385593121a76d4378cc81f77cdce6a66df0575a41041b1189c510254aec91878afc99 + checksum: 10c0/3c35d1951b8640036f93b1b5628a90f849e49ca4f2e6aba393ff4978413931d9c491c83f71a92f878d5ea4c670af0bba04dfcfb79b310ead22601db7c1420e36 languageName: node linkType: hard "pend@npm:~1.2.0": version: 1.2.0 resolution: 
"pend@npm:1.2.0" - checksum: 6c72f5243303d9c60bd98e6446ba7d30ae29e3d56fdb6fae8767e8ba6386f33ee284c97efe3230a0d0217e2b1723b8ab490b1bbf34fcbb2180dbc8a9de47850d + checksum: 10c0/8a87e63f7a4afcfb0f9f77b39bb92374afc723418b9cb716ee4257689224171002e07768eeade4ecd0e86f1fa3d8f022994219fb45634f2dbd78c6803e452458 languageName: node linkType: hard "picocolors@npm:^1.0.0": version: 1.0.0 resolution: "picocolors@npm:1.0.0" - checksum: a2e8092dd86c8396bdba9f2b5481032848525b3dc295ce9b57896f931e63fc16f79805144321f72976383fc249584672a75cc18d6777c6b757603f372f745981 + checksum: 10c0/20a5b249e331c14479d94ec6817a182fd7a5680debae82705747b2db7ec50009a5f6648d0621c561b0572703f84dbef0858abcbd5856d3c5511426afcb1961f7 languageName: node linkType: hard "picomatch@npm:^2.0.4, picomatch@npm:^2.2.1, picomatch@npm:^2.2.2, picomatch@npm:^2.2.3, picomatch@npm:^2.3.0, picomatch@npm:^2.3.1": version: 2.3.1 resolution: "picomatch@npm:2.3.1" - checksum: 60c2595003b05e4535394d1da94850f5372c9427ca4413b71210f437f7b2ca091dbd611c45e8b37d10036fa8eade25c1b8951654f9d3973bfa66a2ff4d3b08bc + checksum: 10c0/26c02b8d06f03206fc2ab8d16f19960f2ff9e81a658f831ecb656d8f17d9edc799e8364b1f4a7873e89d9702dff96204be0fa26fe4181f6843f040f819dac4be + languageName: node + linkType: hard + +"pidtree@npm:~0.6.0": + version: 0.6.0 + resolution: "pidtree@npm:0.6.0" + bin: + pidtree: bin/pidtree.js + checksum: 10c0/0829ec4e9209e230f74ebf4265f5ccc9ebfb488334b525cb13f86ff801dca44b362c41252cd43ae4d7653a10a5c6ab3be39d2c79064d6895e0d78dc50a5ed6e9 languageName: node linkType: hard "pify@npm:^4.0.1": version: 4.0.1 resolution: "pify@npm:4.0.1" - checksum: 8b97cbf9dc6d4c1320cc238a2db0fc67547f9dc77011729ff353faf34f1936ea1a4d7f3c63b2f4980b253be77bcc72ea1e9e76ee3fd53cce2aafb6a8854d07ec + checksum: 10c0/6f9d404b0d47a965437403c9b90eca8bb2536407f03de165940e62e72c8c8b75adda5516c6b9b23675a5877cc0bcac6bdfb0ef0e39414cd2476d5495da40e7cf languageName: node linkType: hard "pirates@npm:^4.0.4, pirates@npm:^4.0.6": version: 4.0.6 resolution: 
"pirates@npm:4.0.6" - checksum: d02dda76f4fec1cbdf395c36c11cf26f76a644f9f9a1bfa84d3167d0d3154d5289aacc72677aa20d599bb4a6937a471de1b65c995e2aea2d8687cbcd7e43ea5f + checksum: 10c0/00d5fa51f8dded94d7429700fb91a0c1ead00ae2c7fd27089f0c5b63e6eca36197fe46384631872690a66f390c5e27198e99006ab77ae472692ab9c2ca903f36 languageName: node linkType: hard @@ -11317,7 +11530,7 @@ __metadata: resolution: "pkg-dir@npm:3.0.0" dependencies: find-up: "npm:^3.0.0" - checksum: 70c9476ffefc77552cc6b1880176b71ad70bfac4f367604b2b04efd19337309a4eec985e94823271c7c0e83946fa5aeb18cd360d15d10a5d7533e19344bfa808 + checksum: 10c0/902a3d0c1f8ac43b1795fa1ba6ffeb37dfd53c91469e969790f6ed5e29ff2bdc50b63ba6115dc056d2efb4a040aa2446d512b3804bdafdf302f734fb3ec21847 languageName: node linkType: hard @@ -11326,7 +11539,7 @@ __metadata: resolution: "pkg-dir@npm:4.2.0" dependencies: find-up: "npm:^4.0.0" - checksum: 9863e3f35132bf99ae1636d31ff1e1e3501251d480336edb1c211133c8d58906bed80f154a1d723652df1fda91e01c7442c2eeaf9dc83157c7ae89087e43c8d6 + checksum: 10c0/c56bda7769e04907a88423feb320babaed0711af8c436ce3e56763ab1021ba107c7b0cafb11cde7529f669cfc22bffcaebffb573645cbd63842ea9fb17cd7728 languageName: node linkType: hard @@ -11335,7 +11548,7 @@ __metadata: resolution: "pkg-dir@npm:5.0.0" dependencies: find-up: "npm:^5.0.0" - checksum: b167bb8dac7bbf22b1d5e30ec223e6b064b84b63010c9d49384619a36734caf95ed23ad23d4f9bd975e8e8082b60a83395f43a89bb192df53a7c25a38ecb57d9 + checksum: 10c0/793a496d685dc55bbbdbbb22d884535c3b29241e48e3e8d37e448113a71b9e42f5481a61fdc672d7322de12fbb2c584dd3a68bf89b18fffce5c48a390f911bc5 languageName: node linkType: hard @@ -11344,14 +11557,14 @@ __metadata: resolution: "polished@npm:4.3.1" dependencies: "@babel/runtime": "npm:^7.17.8" - checksum: 0902fe2eb16aecde1587a00efee7db8081b1331ac7bcfb6e61214d266388723a84858d732ad9395028e0aecd2bb8d0c39cc03d14b4c24c22329a0e40c38141eb + checksum: 
10c0/45480d4c7281a134281cef092f6ecc202a868475ff66a390fee6e9261386e16f3047b4de46a2f2e1cf7fb7aa8f52d30b4ed631a1e3bcd6f303ca31161d4f07fe languageName: node linkType: hard "possible-typed-array-names@npm:^1.0.0": version: 1.0.0 resolution: "possible-typed-array-names@npm:1.0.0" - checksum: 8ed3e96dfeea1c5880c1f4c9cb707e5fb26e8be22f14f82ef92df20fd2004e635c62ba47fbe8f2bb63bfd80dac1474be2fb39798da8c2feba2815435d1f749af + checksum: 10c0/d9aa22d31f4f7680e20269db76791b41c3a32c01a373e25f8a4813b4d45f7456bfc2b6d68f752dc4aab0e0bb0721cb3d76fb678c9101cb7a16316664bc2c73fd languageName: node linkType: hard @@ -11362,14 +11575,14 @@ __metadata: nanoid: "npm:^3.3.7" picocolors: "npm:^1.0.0" source-map-js: "npm:^1.0.2" - checksum: 93a7ce50cd6188f5f486a9ca98950ad27c19dfed996c45c414fa242944497e4d084a8760d3537f078630226f2bd3c6ab84b813b488740f4432e7c7039cd73a20 + checksum: 10c0/e8dd04e48001eb5857abc9475365bf08f4e508ddf9bc0b8525449a95d190f10d025acebc5b56ac2e94b3c7146790e4ae78989bb9633cb7ee20d1cc9b7dc909b2 languageName: node linkType: hard "prelude-ls@npm:^1.2.1": version: 1.2.1 resolution: "prelude-ls@npm:1.2.1" - checksum: 0b9d2c76801ca652a7f64892dd37b7e3fab149a37d2424920099bf894acccc62abb4424af2155ab36dea8744843060a2d8ddc983518d0b1e22265a22324b72ed + checksum: 10c0/b00d617431e7886c520a6f498a2e14c75ec58f6d93ba48c3b639cf241b54232d90daa05d83a9e9b9fef6baa63cb7e1e4602c2372fea5bc169668401eb127d0cd languageName: node linkType: hard @@ -11378,7 +11591,7 @@ __metadata: resolution: "prettier-linter-helpers@npm:1.0.0" dependencies: fast-diff: "npm:^1.1.2" - checksum: 00ce8011cf6430158d27f9c92cfea0a7699405633f7f1d4a45f07e21bf78e99895911cbcdc3853db3a824201a7c745bd49bfea8abd5fb9883e765a90f74f8392 + checksum: 10c0/81e0027d731b7b3697ccd2129470ed9913ecb111e4ec175a12f0fcfab0096516373bf0af2fef132af50cafb0a905b74ff57996d615f59512bb9ac7378fcc64ab languageName: node linkType: hard @@ -11387,7 +11600,7 @@ __metadata: resolution: "prettier@npm:3.2.5" bin: prettier: bin/prettier.cjs - checksum: 
d509f9da0b70e8cacc561a1911c0d99ec75117faed27b95cc8534cb2349667dee6351b0ca83fa9d5703f14127faa52b798de40f5705f02d843da133fc3aa416a + checksum: 10c0/ea327f37a7d46f2324a34ad35292af2ad4c4c3c3355da07313339d7e554320f66f65f91e856add8530157a733c6c4a897dc41b577056be5c24c40f739f5ee8c6 languageName: node linkType: hard @@ -11396,7 +11609,7 @@ __metadata: resolution: "prettier@npm:2.8.8" bin: prettier: bin-prettier.js - checksum: 00cdb6ab0281f98306cd1847425c24cbaaa48a5ff03633945ab4c701901b8e96ad558eb0777364ffc312f437af9b5a07d0f45346266e8245beaf6247b9c62b24 + checksum: 10c0/463ea8f9a0946cd5b828d8cf27bd8b567345cf02f56562d5ecde198b91f47a76b7ac9eae0facd247ace70e927143af6135e8cf411986b8cb8478784a4d6d724a languageName: node linkType: hard @@ -11407,7 +11620,7 @@ __metadata: ansi-regex: "npm:^5.0.1" ansi-styles: "npm:^5.0.0" react-is: "npm:^17.0.1" - checksum: 248990cbef9e96fb36a3e1ae6b903c551ca4ddd733f8d0912b9cc5141d3d0b3f9f8dfb4d799fb1c6723382c9c2083ffbfa4ad43ff9a0e7535d32d41fd5f01da6 + checksum: 10c0/0cbda1031aa30c659e10921fa94e0dd3f903ecbbbe7184a729ad66f2b6e7f17891e8c7d7654c458fa4ccb1a411ffb695b4f17bbcd3fe075fabe181027c4040ed languageName: node linkType: hard @@ -11418,63 +11631,63 @@ __metadata: "@jest/schemas": "npm:^29.6.3" ansi-styles: "npm:^5.0.0" react-is: "npm:^18.0.0" - checksum: dea96bc83c83cd91b2bfc55757b6b2747edcaac45b568e46de29deee80742f17bc76fe8898135a70d904f4928eafd8bb693cd1da4896e8bdd3c5e82cadf1d2bb + checksum: 10c0/edc5ff89f51916f036c62ed433506b55446ff739358de77207e63e88a28ca2894caac6e73dcb68166a606e51c8087d32d400473e6a9fdd2dbe743f46c9c0276f languageName: node linkType: hard "pretty-hrtime@npm:^1.0.3": version: 1.0.3 resolution: "pretty-hrtime@npm:1.0.3" - checksum: 0a462e88a0a3fd3320288fd8307f488974326ae8e13eea8c27f590f8ee767ccb59cf35bcae1cadff241cd8b72f3e373fc76ff1be95243649899bf8c816874af9 + checksum: 10c0/67cb3fc283a72252b49ac488647e6a01b78b7aa1b8f2061834aa1650691229081518ef3ca940f77f41cc8a8f02ba9eeb74b843481596670209e493062f2e89e0 languageName: node linkType: 
hard "prettysize@npm:^2.0.0": version: 2.0.0 resolution: "prettysize@npm:2.0.0" - checksum: 2f9b3129c307d276d924565cf0766c3b660ea1156c63261331b1a88d5e6a4e0757254706826c36a0e9d14ffcce53feb667fe2aa558ce142aa07ce18b315e7a3a + checksum: 10c0/b5ff8d54844a133d09b582540b731d721af4b86c3d8a9322f204e9e4cb08f891d076ad29acf1ad4091a0515920dd8bf26c96435dcf6ce248131ca4a3f8a1ec89 languageName: node linkType: hard "prismjs@npm:^1.27.0": version: 1.29.0 resolution: "prismjs@npm:1.29.0" - checksum: 2080db382c2dde0cfc7693769e89b501ef1bfc8ff4f8d25c07fd4c37ca31bc443f6133d5b7c145a73309dc396e829ddb7cc18560026d862a887ae08864ef6b07 + checksum: 10c0/d906c4c4d01b446db549b4f57f72d5d7e6ccaca04ecc670fb85cea4d4b1acc1283e945a9cbc3d81819084a699b382f970e02f9d1378e14af9808d366d9ed7ec6 languageName: node linkType: hard "prismjs@npm:~1.27.0": version: 1.27.0 resolution: "prismjs@npm:1.27.0" - checksum: dc83e2e09170b53526182f5435fae056fc200b109cac39faa88eb48d992311c7f59b94990318962fa93299190a9b33a404920ed150e5b364ce48c897f2ba1e8e + checksum: 10c0/841cbf53e837a42df9155c5ce1be52c4a0a8967ac916b52a27d066181a3578186c634e52d06d0547fb62b65c486b99b95f826dd54966619f9721b884f486b498 languageName: node linkType: hard "proc-log@npm:^3.0.0": version: 3.0.0 resolution: "proc-log@npm:3.0.0" - checksum: 02b64e1b3919e63df06f836b98d3af002b5cd92655cab18b5746e37374bfb73e03b84fe305454614b34c25b485cc687a9eebdccf0242cda8fda2475dd2c97e02 + checksum: 10c0/f66430e4ff947dbb996058f6fd22de2c66612ae1a89b097744e17fb18a4e8e7a86db99eda52ccf15e53f00b63f4ec0b0911581ff2aac0355b625c8eac509b0dc languageName: node linkType: hard "process-nextick-args@npm:^2.0.0, process-nextick-args@npm:~2.0.0": version: 2.0.1 resolution: "process-nextick-args@npm:2.0.1" - checksum: 1d38588e520dab7cea67cbbe2efdd86a10cc7a074c09657635e34f035277b59fbb57d09d8638346bf7090f8e8ebc070c96fa5fd183b777fff4f5edff5e9466cf + checksum: 10c0/bec089239487833d46b59d80327a1605e1c5287eaad770a291add7f45fda1bb5e28b38e0e061add0a1d0ee0984788ce74fa394d345eed1c420cacf392c554367 
languageName: node linkType: hard "process@npm:^0.11.10": version: 0.11.10 resolution: "process@npm:0.11.10" - checksum: dbaa7e8d1d5cf375c36963ff43116772a989ef2bb47c9bdee20f38fd8fc061119cf38140631cf90c781aca4d3f0f0d2c834711952b728953f04fd7d238f59f5b + checksum: 10c0/40c3ce4b7e6d4b8c3355479df77aeed46f81b279818ccdc500124e6a5ab882c0cc81ff7ea16384873a95a74c4570b01b120f287abbdd4c877931460eca6084b3 languageName: node linkType: hard "progress@npm:^2.0.1": version: 2.0.3 resolution: "progress@npm:2.0.3" - checksum: e6f0bcb71f716eee9dfac0fe8a2606e3704d6a64dd93baaf49fbadbc8499989a610fe14cf1bc6f61b6d6653c49408d94f4a94e124538084efd8e4cf525e0293d + checksum: 10c0/1697e07cb1068055dbe9fe858d242368ff5d2073639e652b75a7eb1f2a1a8d4afd404d719de23c7b48481a6aa0040686310e2dac2f53d776daa2176d3f96369c languageName: node linkType: hard @@ -11484,7 +11697,7 @@ __metadata: dependencies: err-code: "npm:^2.0.2" retry: "npm:^0.12.0" - checksum: 96e1a82453c6c96eef53a37a1d6134c9f2482f94068f98a59145d0986ca4e497bf110a410adf73857e588165eab3899f0ebcf7b3890c1b3ce802abc0d65967d4 + checksum: 10c0/9c7045a1a2928094b5b9b15336dcd2a7b1c052f674550df63cc3f36cd44028e5080448175b6f6ca32b642de81150f5e7b1a98b728f15cb069f2dd60ac2616b96 languageName: node linkType: hard @@ -11494,7 +11707,7 @@ __metadata: dependencies: kleur: "npm:^3.0.3" sisteransi: "npm:^1.0.5" - checksum: c52536521a4d21eff4f2f2aa4572446cad227464066365a7167e52ccf8d9839c099f9afec1aba0eed3d5a2514b3e79e0b3e7a1dc326b9acde6b75d27ed74b1a9 + checksum: 10c0/16f1ac2977b19fe2cf53f8411cc98db7a3c8b115c479b2ca5c82b5527cd937aa405fa04f9a5960abeb9daef53191b53b4d13e35c1f5d50e8718c76917c5f1ea4 languageName: node linkType: hard @@ -11505,7 +11718,7 @@ __metadata: loose-envify: "npm:^1.4.0" object-assign: "npm:^4.1.1" react-is: "npm:^16.13.1" - checksum: 7d959caec002bc964c86cdc461ec93108b27337dabe6192fb97d69e16a0c799a03462713868b40749bfc1caf5f57ef80ac3e4ffad3effa636ee667582a75e2c0 + checksum: 
10c0/59ece7ca2fb9838031d73a48d4becb9a7cc1ed10e610517c7d8f19a1e02fa47f7c27d557d8a5702bec3cfeccddc853579832b43f449e54635803f277b1c78077 languageName: node linkType: hard @@ -11514,14 +11727,14 @@ __metadata: resolution: "property-information@npm:5.6.0" dependencies: xtend: "npm:^4.0.0" - checksum: e4f45b100fec5968126b08102f9567f1b5fc3442aecbb5b4cdeca401f1f447672e7638a08c81c05dd3979c62d084e0cc6acbe2d8b053c05280ac5abaaf666a68 + checksum: 10c0/d54b77c31dc13bb6819559080b2c67d37d94be7dc271f404f139a16a57aa96fcc0b3ad806d4a5baef9e031744853e4afe3df2e37275aacb1f78079bbb652c5af languageName: node linkType: hard "property-information@npm:^6.0.0": version: 6.4.1 resolution: "property-information@npm:6.4.1" - checksum: 6aa680371ed55b73b0859b2ab9626444a2c201bb52a77a420ce3660293ed6c17256b2be0f1d8672856553fc68c92a47060e1816153790f1b22883f7b3d8db88f + checksum: 10c0/fc8cb86b0040f1be93437ad52cd815c4744343686852b116e2231997b92e160f3540498beacc953ad1509461d6f70ba9020766083aacdffcede2d87ca8b48a18 languageName: node linkType: hard @@ -11531,21 +11744,21 @@ __metadata: dependencies: forwarded: "npm:0.2.0" ipaddr.js: "npm:1.9.1" - checksum: f24a0c80af0e75d31e3451398670d73406ec642914da11a2965b80b1898ca6f66a0e3e091a11a4327079b2b268795f6fa06691923fef91887215c3d0e8ea3f68 + checksum: 10c0/c3eed999781a35f7fd935f398b6d8920b6fb00bbc14287bc6de78128ccc1a02c89b95b56742bf7cf0362cc333c61d138532049c7dedc7a328ef13343eff81210 languageName: node linkType: hard "proxy-from-env@npm:^1.0.0, proxy-from-env@npm:^1.1.0": version: 1.1.0 resolution: "proxy-from-env@npm:1.1.0" - checksum: f0bb4a87cfd18f77bc2fba23ae49c3b378fb35143af16cc478171c623eebe181678f09439707ad80081d340d1593cd54a33a0113f3ccb3f4bc9451488780ee23 + checksum: 10c0/fe7dd8b1bdbbbea18d1459107729c3e4a2243ca870d26d34c2c1bcd3e4425b7bcc5112362df2d93cc7fb9746f6142b5e272fd1cc5c86ddf8580175186f6ad42b languageName: node linkType: hard "psl@npm:^1.1.33": version: 1.9.0 resolution: "psl@npm:1.9.0" - checksum: 
d07879d4bfd0ac74796306a8e5a36a93cfb9c4f4e8ee8e63fbb909066c192fe1008cd8f12abd8ba2f62ca28247949a20c8fb32e1d18831d9e71285a1569720f9 + checksum: 10c0/6a3f805fdab9442f44de4ba23880c4eba26b20c8e8e0830eff1cb31007f6825dace61d17203c58bfe36946842140c97a1ba7f67bc63ca2d88a7ee052b65d97ab languageName: node linkType: hard @@ -11555,7 +11768,7 @@ __metadata: dependencies: end-of-stream: "npm:^1.1.0" once: "npm:^1.3.1" - checksum: e9f26a17be00810bff37ad0171edb35f58b242487b0444f92fb7d78bc7d61442fa9b9c5bd93a43fd8fd8ddd3cc75f1221f5e04c790f42907e5baab7cf5e2b931 + checksum: 10c0/f1fe8960f44d145f8617ea4c67de05392da4557052980314c8f85081aee26953bdcab64afad58a2b1df0e8ff7203e3710e848cbe81a01027978edc6e264db355 languageName: node linkType: hard @@ -11565,7 +11778,7 @@ __metadata: dependencies: end-of-stream: "npm:^1.1.0" once: "npm:^1.3.1" - checksum: e42e9229fba14732593a718b04cb5e1cfef8254544870997e0ecd9732b189a48e1256e4e5478148ecb47c8511dca2b09eae56b4d0aad8009e6fac8072923cfc9 + checksum: 10c0/bbdeda4f747cdf47db97428f3a135728669e56a0ae5f354a9ac5b74556556f5446a46f720a8f14ca2ece5be9b4d5d23c346db02b555f46739934cc6c093a5478 languageName: node linkType: hard @@ -11576,14 +11789,14 @@ __metadata: duplexify: "npm:^3.6.0" inherits: "npm:^2.0.3" pump: "npm:^2.0.0" - checksum: 5d11a99f320dc2a052610399bac6d03db0a23bc23b23aa2a7d0adf879da3065a55134b975db66dc46bc79f54af3dd575d8119113a0a5b311a00580e1f053896b + checksum: 10c0/0bcabf9e3dbf2d0cc1f9b84ac80d3c75386111caf8963bfd98817a1e2192000ac0ccc804ca6ccd5b2b8430fdb71347b20fb2f014fe3d41adbacb1b502a841c45 languageName: node linkType: hard "punycode@npm:^2.1.0, punycode@npm:^2.1.1": version: 2.3.1 resolution: "punycode@npm:2.3.1" - checksum: febdc4362bead22f9e2608ff0171713230b57aff9dddc1c273aa2a651fbd366f94b7d6a71d78342a7c0819906750351ca7f2edd26ea41b626d87d6a13d1bd059 + checksum: 10c0/14f76a8206bc3464f794fb2e3d3cc665ae416c01893ad7a02b23766eb07159144ee612ad67af5e84fa4479ccfe67678c4feb126b0485651b302babf66f04f9e9 languageName: node linkType: hard @@ -11601,14 
+11814,14 @@ __metadata: proxy-from-env: "npm:^1.0.0" rimraf: "npm:^2.6.1" ws: "npm:^6.1.0" - checksum: fcbf80c954f9562f88b53886dc377595bf478abbb47c005f9131a56b6704cdd0a26b60f2646d2340866ed9f5059aae2b9f06a0f04310f5f14520ec94a687fbe6 + checksum: 10c0/29a73c2327e208e6528bac05f841b3340ee1a8d7bd59e7b235c9d8b3c0bf266804ad1aa901a0e4a1d66ce4202646f242988c3c5c4dfb105e9ad082bf4aae69be languageName: node linkType: hard "pure-rand@npm:^6.0.0": version: 6.0.4 resolution: "pure-rand@npm:6.0.4" - checksum: 34fed0abe99d3db7ddc459c12e1eda6bff05db6a17f2017a1ae12202271ccf276fb223b442653518c719671c1b339bbf97f27ba9276dba0997c89e45c4e6a3bf + checksum: 10c0/0fe7b12f25b10ea5b804598a6f37e4bcf645d2be6d44fe963741f014bf0095bdb6ff525106d6da6e76addc8142358fd380f1a9b8c62ea4d5516bf26a96a37c95 languageName: node linkType: hard @@ -11617,7 +11830,7 @@ __metadata: resolution: "qs@npm:6.11.0" dependencies: side-channel: "npm:^1.0.4" - checksum: 5a3bfea3e2f359ede1bfa5d2f0dbe54001aa55e40e27dc3e60fab814362d83a9b30758db057c2011b6f53a2d4e4e5150194b5bac45372652aecb3e3c0d4b256e + checksum: 10c0/4e4875e4d7c7c31c233d07a448e7e4650f456178b9dd3766b7cfa13158fdb24ecb8c4f059fa91e820dc6ab9f2d243721d071c9c0378892dcdad86e9e9a27c68f languageName: node linkType: hard @@ -11626,35 +11839,35 @@ __metadata: resolution: "qs@npm:6.11.2" dependencies: side-channel: "npm:^1.0.4" - checksum: f2321d0796664d0f94e92447ccd3bdfd6b6f3a50b6b762aa79d7f5b1ea3a7a9f94063ba896b82bc2a877ed6a7426d4081e4f16568fdb04f0ee188cca9d8505b4 + checksum: 10c0/4f95d4ff18ed480befcafa3390022817ffd3087fc65f146cceb40fc5edb9fa96cb31f648cae2fa96ca23818f0798bd63ad4ca369a0e22702fcd41379b3ab6571 languageName: node linkType: hard "querystringify@npm:^2.1.1": version: 2.2.0 resolution: "querystringify@npm:2.2.0" - checksum: 46ab16f252fd892fc29d6af60966d338cdfeea68a231e9457631ffd22d67cec1e00141e0a5236a2eb16c0d7d74175d9ec1d6f963660c6f2b1c2fc85b194c5680 + checksum: 
10c0/3258bc3dbdf322ff2663619afe5947c7926a6ef5fb78ad7d384602974c467fadfc8272af44f5eb8cddd0d011aae8fabf3a929a8eee4b86edcc0a21e6bd10f9aa languageName: node linkType: hard "queue-microtask@npm:^1.2.2": version: 1.2.3 resolution: "queue-microtask@npm:1.2.3" - checksum: 72900df0616e473e824202113c3df6abae59150dfb73ed13273503127235320e9c8ca4aaaaccfd58cf417c6ca92a6e68ee9a5c3182886ae949a768639b388a7b + checksum: 10c0/900a93d3cdae3acd7d16f642c29a642aea32c2026446151f0778c62ac089d4b8e6c986811076e1ae180a694cedf077d453a11b58ff0a865629a4f82ab558e102 languageName: node linkType: hard "ramda@npm:0.29.0": version: 0.29.0 resolution: "ramda@npm:0.29.0" - checksum: b156660f2c58b4a13bcc4f1a0eabc1145d8db11d33d26a2fb03cd6adf3983a1c1f2bbaaf708c421029e9b09684262d056752623f7e62b79a503fb9217dec69d4 + checksum: 10c0/b00eaaf1c62b06a99affa1d583e256bd65ad27ab9d0ef512f55d7d93b842e7cd244a4a09179f61fdd8548362e409323867a2b0477cbd0626b5644eb6ac7c53da languageName: node linkType: hard "range-parser@npm:~1.2.1": version: 1.2.1 resolution: "range-parser@npm:1.2.1" - checksum: ce21ef2a2dd40506893157970dc76e835c78cf56437e26e19189c48d5291e7279314477b06ac38abd6a401b661a6840f7b03bd0b1249da9b691deeaa15872c26 + checksum: 10c0/96c032ac2475c8027b7a4e9fe22dc0dfe0f6d90b85e496e0f016fbdb99d6d066de0112e680805075bd989905e2123b3b3d002765149294dce0c1f7f01fcc2ea0 languageName: node linkType: hard @@ -11666,7 +11879,7 @@ __metadata: http-errors: "npm:2.0.0" iconv-lite: "npm:0.4.24" unpipe: "npm:1.0.0" - checksum: 280bedc12db3490ecd06f740bdcf66093a07535374b51331242382c0e130bb273ebb611b7bc4cba1b4b4e016cc7b1f4b05a6df885a6af39c2bc3b94c02291c84 + checksum: 10c0/5dad5a3a64a023b894ad7ab4e5c7c1ce34d3497fc7138d02f8c88a3781e68d8a55aa7d4fd3a458616fa8647cc228be314a1c03fb430a07521de78b32c4dd09d2 languageName: node linkType: hard @@ -11678,7 +11891,7 @@ __metadata: peerDependencies: react: ^16.9.0 || ^17 || ^18 react-dom: ^16.9.0 || ^17 || ^18 - checksum: 
f3cdf4bc00739196bad3ba7aa4837688a446cba3cd11c9ac115c86f19f42e4db68a84c0fe2fc740973669afd88aa7f6da39fea7093ddf34a0a24ccdaaf29fe73 + checksum: 10c0/53d2a695976e8380f9ae314cf4f4f0e894edef1858a5f13da58431ed6bbe63f0b9d4db9f0e25303b7ef55049f00d77bac247f39d13f8c9ac3bca0c459c573d5b languageName: node linkType: hard @@ -11688,7 +11901,7 @@ __metadata: peerDependencies: react: ">=16.8.0" react-dom: ">=16.8.0" - checksum: 3e02ba013454818d0c323949bd961fb2c19ac18130dfc67a4032aa5b03787c5ffe7ff159c4b97dc3475072d576828ca0c4b8e8ce85b55eaf484180596cdf0403 + checksum: 10c0/48eb73cf71e10841c2a61b6b06ab81da9fffa9876134c239bfdebcf348ce2a47e56b146338e35dfb03512c85966bfc9a53844fc56bc50154e71f8daee59ff6f0 languageName: node linkType: hard @@ -11700,7 +11913,7 @@ __metadata: prop-types: "npm:^15.8.1" peerDependencies: react: ^15.3.0 || 16 || 17 || 18 - checksum: 56a8b11a268a19d4e4ec409327f1c17d68c4f13a54330b9c0e3271acb44bb6886b72e04d77399c9945968851e8532dd192bbccffd1b2f8b010f4bb47e5743b3b + checksum: 10c0/de70d9f9c2d17cee207888ed791d4a042c300e5ca732503434d49e6745cff56c0d5ebcc82ab86237e9c2248e636d1d031b9f9cf9913ecec61d82a0e5ebc93881 languageName: node linkType: hard @@ -11709,7 +11922,7 @@ __metadata: resolution: "react-docgen-typescript@npm:2.2.2" peerDependencies: typescript: ">= 4.3.x" - checksum: 081fc3a876f53b9eeffcff357e5b6c190db799d50edcf11b187857d8cb8cce28000ed777ed16dd52a1c955f332612ef6b1f02cf8adcbcb084b8da9ff1ae5fd13 + checksum: 10c0/d31a061a21b5d4b67d4af7bc742541fd9e16254bd32861cd29c52565bc2175f40421a3550d52b6a6b0d0478e7cc408558eb0060a0bdd2957b02cfceeb0ee1e88 languageName: node linkType: hard @@ -11727,7 +11940,7 @@ __metadata: doctrine: "npm:^3.0.0" resolve: "npm:^1.22.1" strip-indent: "npm:^4.0.0" - checksum: 53eaed76cceb55606584c6ab603f04ec78c066cfb9ed983e1f7b388a75bfb8c2fc9c6b7ab299bac311b3daeca95adb8076b58ca96b41907b33c518299268831f + checksum: 10c0/74622750e60b287d2897a6887a2bd88303fadd84540247e162e9e970430864ae7b49152de043233d873a0aa7cffa406e5cd8fc1e8e2c277b8da73198b570f16b 
languageName: node linkType: hard @@ -11739,7 +11952,7 @@ __metadata: scheduler: "npm:^0.23.0" peerDependencies: react: ^18.2.0 - checksum: ca5e7762ec8c17a472a3605b6f111895c9f87ac7d43a610ab7024f68cd833d08eda0625ce02ec7178cc1f3c957cf0b9273cdc17aa2cd02da87544331c43b1d21 + checksum: 10c0/66dfc5f93e13d0674e78ef41f92ed21dfb80f9c4ac4ac25a4b51046d41d4d2186abc915b897f69d3d0ebbffe6184e7c5876f2af26bfa956f179225d921be713a languageName: node linkType: hard @@ -11753,35 +11966,35 @@ __metadata: peerDependencies: react: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 react-dom: ^0.14.8 || ^15.0.1 || ^16.0.0 || ^17.0.1 || ^18.0.0 - checksum: 9a874b2f16b4624a72c4b766b096d693a382b9dc7f2264f802395852ae3435ccde8e9e47bbe45cf5f30eba70f8126af6aca832190e285b0096af3ecade994df1 + checksum: 10c0/0d60a0ea758529c32a706d0c69d70b69fb94de3c46442fffdee34f08f51ffceddbb5395b41dfd1565895653e9f60f98ca525835be9d5db1f16d6b22be12f4cd4 languageName: node linkType: hard "react-is@npm:18.1.0": version: 18.1.0 resolution: "react-is@npm:18.1.0" - checksum: fe09c86d5e12a8531bf3e748660f3dffbe900a6da0b488c7efaf0a866e16b74ecc1b0011b0960b13594f8719f39f87a987c0c85edff0b2d3e2f14b87e7230ad2 + checksum: 10c0/558874e4c3bd9805a9294426e090919ee6901be3ab07f80b997c36b5a01a8d691112802e7438d146f6c82fd6495d8c030f276ef05ec3410057f8740a8d723f8c languageName: node linkType: hard "react-is@npm:^16.13.1, react-is@npm:^16.7.0": version: 16.13.1 resolution: "react-is@npm:16.13.1" - checksum: 5aa564a1cde7d391ac980bedee21202fc90bdea3b399952117f54fb71a932af1e5902020144fb354b4690b2414a0c7aafe798eb617b76a3d441d956db7726fdf + checksum: 10c0/33977da7a5f1a287936a0c85639fec6ca74f4f15ef1e59a6bc20338fc73dc69555381e211f7a3529b8150a1f71e4225525b41b60b52965bda53ce7d47377ada1 languageName: node linkType: hard "react-is@npm:^17.0.1": version: 17.0.2 resolution: "react-is@npm:17.0.2" - checksum: 73b36281e58eeb27c9cc6031301b6ae19ecdc9f18ae2d518bdb39b0ac564e65c5779405d623f1df9abf378a13858b79442480244bd579968afc1faf9a2ce5e05 + checksum: 
10c0/2bdb6b93fbb1820b024b496042cce405c57e2f85e777c9aabd55f9b26d145408f9f74f5934676ffdc46f3dcff656d78413a6e43968e7b3f92eea35b3052e9053 languageName: node linkType: hard "react-is@npm:^18.0.0": version: 18.2.0 resolution: "react-is@npm:18.2.0" - checksum: 200cd65bf2e0be7ba6055f647091b725a45dd2a6abef03bf2380ce701fd5edccee40b49b9d15edab7ac08a762bf83cb4081e31ec2673a5bfb549a36ba21570df + checksum: 10c0/6eb5e4b28028c23e2bfcf73371e72cd4162e4ac7ab445ddae2afe24e347a37d6dc22fae6e1748632cd43c6d4f9b8f86dcf26bf9275e1874f436d129952528ae0 languageName: node linkType: hard @@ -11793,7 +12006,7 @@ __metadata: peerDependencies: react: ^16.14.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.14.0 || ^17.0.0 || ^18.0.0 - checksum: 44066948d662e3aab9c9bf581e0f9310e7b2379e63e6b0d2e96ff8e663af6da6ca5152def56892dc15c9c6b9aa596ef93e755c21ee8f7e380b9f9c33092802da + checksum: 10c0/ebcf895352f1f4a3c1ed0c9ff7a67886146723b02b8137f5f21b3a5b522992418dfea5cf8e40a55b1a55828a0721f7954a50cc59264f00a2cd2caa3416d34440 languageName: node linkType: hard @@ -11803,21 +12016,21 @@ __metadata: peerDependencies: react: ^16.14.0 || ^17.0.0 react-dom: ^16.14.0 || ^17.0.0 - checksum: da2a7786a2014e99669b99de05cd9e7deb46f8bce04c3c193bf7a2c927a656d03735e50c7310a4f59d853abfc9fa1b76f8965c9afe82f4943fe672a619c766de + checksum: 10c0/5c61654a0a20086eef17f2f356bdcb17571bbc2eb94a084d97f3e4087878f2b038f1040e98e73a40e3caf3cf418175a23502a4e86488e186b3b0c349d1d8ca1a languageName: node linkType: hard "react-merge-refs@npm:2.0.2": version: 2.0.2 resolution: "react-merge-refs@npm:2.0.2" - checksum: 64758870d79ad52e6666d1d30cdecd5a72722edfd5c89808b41acdbd81a039f0c78b8b576f7ae247010468fc45cb57dd31f402693c64224439dbe0127f4389f3 + checksum: 10c0/ebadd9517c442419f30091b5fd89808d8d906569e7b90b247d3940f3165c91c7efce4b57aca15ed6eaf41c5507fa344d613a9380d76c50e3540f422d71265d26 languageName: node linkType: hard "react-property@npm:2.0.2": version: 2.0.2 resolution: "react-property@npm:2.0.2" - checksum: 
3a4bc1951b2b7992cb8a2d3f12016dd0920d1c06eb58b456204a6ae1210401d62baece098d3200ed8a0513dde247a5d96ffdb24f354e32ce5a9b26fbd8552668 + checksum: 10c0/27a3dfa68d29d45fc3582552715203291d26c6f1b228fdb6775e7ca19b10753141dbe98a0aa3a4da745b39fcd7427dc2d623055e63742062231ee18692a6f0fa languageName: node linkType: hard @@ -11849,14 +12062,14 @@ __metadata: optional: true redux: optional: true - checksum: 2998af1870dadc1a5c39566712481cc087af259198c419840b6b966d311ba23bb95b31441440ff4c61ac710024914ebb9c71fbd4290e6fa25d255e6f20ae737a + checksum: 10c0/0efeeb228ebd1c20b7f127b010959f6531608a9e7d7c0680f3f5801fe9e912a60e3735b85d004aceed6a12740cb9dd5594cd1ab227b8c2aa91aeb8d87b0dbe1e languageName: node linkType: hard "react-refresh@npm:^0.14.0": version: 0.14.0 resolution: "react-refresh@npm:0.14.0" - checksum: 75941262ce3ed4fc79b52492943fd59692f29b84f30f3822713b7e920f28e85c62a4386f85cbfbaea95ed62d3e74209f0a0bb065904b7ab2f166a74ac3812e2a + checksum: 10c0/b8ae07ad153357d77830928a7f1fc2df837aabefee907fa273ba04c7643f3b860e986f1d4b7ada9b721c8d79b8c24b5b911a314a1a2398b105f1b13d19ea2b8d languageName: node linkType: hard @@ -11872,7 +12085,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 6d05e74ee8049b322ba0aeb398e092ae284a5b04013bc07f0c1f283824b088fd5c1b1f1514a0e0e501c063a9c3b5899373039329d0266a21121222c814052053 + checksum: 10c0/21b2b02818b04f2c755c5062c90385420adb244107ac90ec87d43cd338760d3cc1cae6eeb59ab198bbc9e388e1a5909551e0b8a708b0d87ce221cf50951bb1fc languageName: node linkType: hard @@ -11891,7 +12104,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: f0646ac384ce3852d1f41e30a9f9e251b11cf3b430d1d114c937c8fa7f90a895c06378d0d6b6ff0b2d00cbccf15e845921944fd6074ae67a0fb347a718106d88 + checksum: 10c0/4952657e6a7b9d661d4ad4dfcef81b9c7fa493e35164abff99c35c0b27b3d172ef7ad70c09416dc44dd14ff2e6b38a5ec7da27e27e90a15cbad36b8fd2fd8054 languageName: node linkType: hard @@ -11904,7 +12117,7 @@ __metadata: peerDependencies: react: ">=16.8" 
react-dom: ">=16.8" - checksum: 85575793cbdb84b05e9c33fef6f81e6b09e9f2606d2ba03392f83689dbb240212e5b22634b95049fc19364e9b44d45a519387d1bff4eba8a163548aa3376bc0f + checksum: 10c0/be7433bc290e56c0dd3e1008d53a76cc9866bf460980658501880876420086f11810ec3355a3abcd79ac537d6a1351eda009fade841c266456d0e8df60967b76 languageName: node linkType: hard @@ -11915,7 +12128,7 @@ __metadata: "@remix-run/router": "npm:1.6.2" peerDependencies: react: ">=16.8" - checksum: a40d1ea78e3b5b3167ed6cbaf74b2e60592fd1822b9f94a2499933bf699130a81f669bc06bdf34f38489a96d31510848c21254a48e49038b18ecbf42993eaa34 + checksum: 10c0/a437606078d6096a6dfa322adf80d00ce153f20cd470ad888088c8da99f44477b963425c53f5461a540b909fc274154292ed80d636482dcdc58a423915ca1433 languageName: node linkType: hard @@ -11932,7 +12145,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 80c58fd6aac3594e351e2e7b048d8a5b09508adb21031a38b3c40911fe58295572eddc640d4b20a7be364842c8ed1120fe30097e22ea055316b375b88d4ff02a + checksum: 10c0/6d66f3bdb65e1ec79089f80314da97c9a005087a04ee034255a5de129a4c0d9fd0bf99fa7bf642781ac2dc745ca687aae3de082bd8afdd0d117bc953241e15ad languageName: node linkType: hard @@ -11947,7 +12160,7 @@ __metadata: refractor: "npm:^3.6.0" peerDependencies: react: ">= 0.14.0" - checksum: 14291a92672a79cf167e6cf2dba2547b920c24573729a95ae24035bece43f7e00e3429477be7b87455e8ce018682c8992545c405a915421eb772c5cd07c00576 + checksum: 10c0/2bf57a1ea151f688efc7eba355677577c9bb55f05f9df7ef86627aae42f63f505486cddf3f4a628aecc51ec75e89beb9533201570d03201c4bf7d69d61d2545d languageName: node linkType: hard @@ -11956,7 +12169,7 @@ __metadata: resolution: "react@npm:18.2.0" dependencies: loose-envify: "npm:^1.1.0" - checksum: b9214a9bd79e99d08de55f8bef2b7fc8c39630be97c4e29d7be173d14a9a10670b5325e94485f74cd8bff4966ef3c78ee53c79a7b0b9b70cba20aa8973acc694 + checksum: 10c0/b562d9b569b0cb315e44b48099f7712283d93df36b19a39a67c254c6686479d3980b7f013dc931f4a5a3ae7645eae6386b4aa5eea933baa54ecd0f9acb0902b8 
languageName: node linkType: hard @@ -11967,7 +12180,7 @@ __metadata: find-up: "npm:^4.1.0" read-pkg: "npm:^5.2.0" type-fest: "npm:^0.8.1" - checksum: e4e93ce70e5905b490ca8f883eb9e48b5d3cebc6cd4527c25a0d8f3ae2903bd4121c5ab9c5a3e217ada0141098eeb661313c86fa008524b089b8ed0b7f165e44 + checksum: 10c0/82b3ac9fd7c6ca1bdc1d7253eb1091a98ff3d195ee0a45386582ce3e69f90266163c34121e6a0a02f1630073a6c0585f7880b3865efcae9c452fa667f02ca385 languageName: node linkType: hard @@ -11979,7 +12192,7 @@ __metadata: normalize-package-data: "npm:^2.5.0" parse-json: "npm:^5.0.0" type-fest: "npm:^0.6.0" - checksum: eb696e60528b29aebe10e499ba93f44991908c57d70f2d26f369e46b8b9afc208ef11b4ba64f67630f31df8b6872129e0a8933c8c53b7b4daf0eace536901222 + checksum: 10c0/b51a17d4b51418e777029e3a7694c9bd6c578a5ab99db544764a0b0f2c7c0f58f8a6bc101f86a6fceb8ba6d237d67c89acf6170f6b98695d0420ddc86cf109fb languageName: node linkType: hard @@ -11994,7 +12207,7 @@ __metadata: safe-buffer: "npm:~5.1.1" string_decoder: "npm:~1.1.1" util-deprecate: "npm:~1.0.1" - checksum: 8500dd3a90e391d6c5d889256d50ec6026c059fadee98ae9aa9b86757d60ac46fff24fafb7a39fa41d54cb39d8be56cc77be202ebd4cd8ffcf4cb226cbaa40d4 + checksum: 10c0/7efdb01f3853bc35ac62ea25493567bf588773213f5f4a79f9c365e1ad13bab845ac0dae7bc946270dc40c3929483228415e92a3fc600cc7e4548992f41ee3fa languageName: node linkType: hard @@ -12005,7 +12218,7 @@ __metadata: inherits: "npm:^2.0.3" string_decoder: "npm:^1.1.1" util-deprecate: "npm:^1.0.1" - checksum: d9e3e53193adcdb79d8f10f2a1f6989bd4389f5936c6f8b870e77570853561c362bee69feca2bbb7b32368ce96a85504aa4cedf7cf80f36e6a9de30d64244048 + checksum: 10c0/e37be5c79c376fdd088a45fa31ea2e423e5d48854be7a22a58869b4e84d25047b193f6acb54f1012331e1bcd667ffb569c01b99d36b0bd59658fb33f513511b7 languageName: node linkType: hard @@ -12014,7 +12227,7 @@ __metadata: resolution: "readdirp@npm:3.6.0" dependencies: picomatch: "npm:^2.2.1" - checksum: 
196b30ef6ccf9b6e18c4e1724b7334f72a093d011a99f3b5920470f0b3406a51770867b3e1ae9711f227ef7a7065982f6ee2ce316746b2cb42c88efe44297fe7 + checksum: 10c0/6fa848cf63d1b82ab4e985f4cf72bd55b7dcfd8e0a376905804e48c3634b7e749170940ba77b32804d5fe93b3cc521aa95a8d7e7d725f830da6d93f3669ce66b languageName: node linkType: hard @@ -12027,7 +12240,7 @@ __metadata: esprima: "npm:~4.0.0" source-map: "npm:~0.6.1" tslib: "npm:^2.0.1" - checksum: a82e388ded2154697ea54e6d65d060143c9cf4b521f770232a7483e253d45bdd9080b44dc5874d36fe720ba1a10cb20b95375896bd89f5cab631a751e93979f5 + checksum: 10c0/d719633be8029e28f23b8191d4a525c5dbdac721792ab3cb5e9dfcf1694fb93f3c147b186916195a9c7fa0711f1e4990ba457cdcee02faed3899d4a80da1bd1f languageName: node linkType: hard @@ -12037,7 +12250,7 @@ __metadata: dependencies: indent-string: "npm:^4.0.0" strip-indent: "npm:^3.0.0" - checksum: fa1ef20404a2d399235e83cc80bd55a956642e37dd197b4b612ba7327bf87fa32745aeb4a1634b2bab25467164ab4ed9c15be2c307923dd08b0fe7c52431ae6b + checksum: 10c0/d64a6b5c0b50eb3ddce3ab770f866658a2b9998c678f797919ceb1b586bab9259b311407280bd80b804e2a7c7539b19238ae6a2a20c843f1a7fcff21d48c2eae languageName: node linkType: hard @@ -12046,7 +12259,7 @@ __metadata: resolution: "redux-thunk@npm:2.4.2" peerDependencies: redux: ^4 - checksum: 9bcb1193835128ecebf1e1a1b1a37bc15e8dfbdf6b6ee1b5566dd4c8e4ca05a81175f0c6dda34ab47f87053cd13b74d9f881d59446691d7b192831852b5d7a72 + checksum: 10c0/e202d6ef7dfa7df08ed24cb221aa89d6c84dbaa7d65fe90dbd8e826d0c10d801f48388f9a7598a4fd970ecbc93d335014570a61ca7bc8bf569eab5de77b31a3c languageName: node linkType: hard @@ -12055,7 +12268,7 @@ __metadata: resolution: "redux@npm:4.2.1" dependencies: "@babel/runtime": "npm:^7.9.2" - checksum: 371e4833b671193303a7dea7803c8fdc8e0d566740c78f580e0a3b77b4161da25037626900a2205a5d616117fa6ad09a4232e5a110bd437186b5c6355a041750 + checksum: 10c0/136d98b3d5dbed1cd6279c8c18a6a74c416db98b8a432a46836bdd668475de6279a2d4fd9d1363f63904e00f0678a8a3e7fa532c897163340baf1e71bb42c742 languageName: node 
linkType: hard @@ -12070,7 +12283,7 @@ __metadata: get-intrinsic: "npm:^1.2.3" globalthis: "npm:^1.0.3" which-builtin-type: "npm:^1.1.3" - checksum: 14560efa54b4b8549f5e0961ee4dfa9f034bd4b85c7805d487da30eb520ea252b566bc4098a7cb1bc2219e4d9cb095db43c05b27205bd6299bb141294cea2d14 + checksum: 10c0/68f2a21494a9f4f5acc19bda5213236aa7fc02f9953ce2b18670c63b9ca3dec294dcabbb9d394d98cd2fc0de46b7cd6354614a60a33cabdbb5de9a6f7115f9a6 languageName: node linkType: hard @@ -12081,7 +12294,7 @@ __metadata: hastscript: "npm:^6.0.0" parse-entities: "npm:^2.0.0" prismjs: "npm:~1.27.0" - checksum: 671bbcf5ae1b4e207f98b9a3dc2cbae215be30effe9f3bdcfd10f565f45fecfe97334cf38c8e4f52d6cc012ff2ec7fb627d3d5678efc388751c8b1e1f7ca2a6c + checksum: 10c0/63ab62393c8c2fd7108c2ea1eff721c0ad2a1a6eee60fdd1b47f4bb25cf298667dc97d041405b3e718b0817da12b37a86ed07ebee5bd2ca6405611f1bae456db languageName: node linkType: hard @@ -12093,7 +12306,7 @@ __metadata: "@types/prismjs": "npm:^1.0.0" hastscript: "npm:^7.0.0" parse-entities: "npm:^4.0.0" - checksum: d41cdd3b7e2ee4dbe33cd06f5860fbd1219c4b60cb80ac606337ec9ff2f06b0fdd1b712a5f0e9dbf840193f37870546b5c145ca4772e961a5a117d5137ae8cc4 + checksum: 10c0/b072db8e77bee2416b516cda21c8ddff34662ae6840a88653a3a002905adf044b08ab4d0fa2fd4f69f40edd8b0f859ffa11f799a0a339df4c34612aad8584207 languageName: node linkType: hard @@ -12102,21 +12315,21 @@ __metadata: resolution: "regenerate-unicode-properties@npm:10.1.1" dependencies: regenerate: "npm:^1.4.2" - checksum: b855152efdcca0ecc37ceb0cb6647a544344555fc293af3b57191b918e1bc9c95ee404a9a64a1d692bf66d45850942c29d93f2740c0d1980d3a8ea2ca63b184e + checksum: 10c0/89adb5ee5ba081380c78f9057c02e156a8181969f6fcca72451efc45612e0c3df767b4333f8d8479c274d9c6fe52ec4854f0d8a22ef95dccbe87da8e5f2ac77d languageName: node linkType: hard "regenerate@npm:^1.4.2": version: 1.4.2 resolution: "regenerate@npm:1.4.2" - checksum: 
dc6c95ae4b3ba6adbd7687cafac260eee4640318c7a95239d5ce847d9b9263979758389e862fe9c93d633b5792ea4ada5708df75885dc5aa05a309fa18140a87 + checksum: 10c0/f73c9eba5d398c818edc71d1c6979eaa05af7a808682749dd079f8df2a6d91a9b913db216c2c9b03e0a8ba2bba8701244a93f45211afbff691c32c7b275db1b8 languageName: node linkType: hard "regenerator-runtime@npm:^0.14.0": version: 0.14.1 resolution: "regenerator-runtime@npm:0.14.1" - checksum: 5db3161abb311eef8c45bcf6565f4f378f785900ed3945acf740a9888c792f75b98ecb77f0775f3bf95502ff423529d23e94f41d80c8256e8fa05ed4b07cf471 + checksum: 10c0/1b16eb2c4bceb1665c89de70dcb64126a22bc8eb958feef3cd68fe11ac6d2a4899b5cd1b80b0774c7c03591dc57d16631a7f69d2daa2ec98100e2f29f7ec4cc4 languageName: node linkType: hard @@ -12125,7 +12338,7 @@ __metadata: resolution: "regenerator-transform@npm:0.15.2" dependencies: "@babel/runtime": "npm:^7.8.4" - checksum: c4fdcb46d11bbe32605b4b9ed76b21b8d3f241a45153e9dc6f5542fed4c7744fed459f42701f650d5d5956786bf7de57547329d1c05a9df2ed9e367b9d903302 + checksum: 10c0/7cfe6931ec793269701994a93bab89c0cc95379191fad866270a7fea2adfec67ea62bb5b374db77058b60ba4509319d9b608664d0d288bd9989ca8dbd08fae90 languageName: node linkType: hard @@ -12137,7 +12350,7 @@ __metadata: define-properties: "npm:^1.2.1" es-errors: "npm:^1.3.0" set-function-name: "npm:^2.0.1" - checksum: 9fffc01da9c4e12670ff95bc5204364615fcc12d86fc30642765af908675678ebb0780883c874b2dbd184505fb52fa603d80073ecf69f461ce7f56b15d10be9c + checksum: 10c0/0f3fc4f580d9c349f8b560b012725eb9c002f36daa0041b3fbf6f4238cb05932191a4d7d5db3b5e2caa336d5150ad0402ed2be81f711f9308fe7e1a9bf9bd552 languageName: node linkType: hard @@ -12151,7 +12364,7 @@ __metadata: regjsparser: "npm:^0.9.1" unicode-match-property-ecmascript: "npm:^2.0.0" unicode-match-property-value-ecmascript: "npm:^2.1.0" - checksum: ed0d7c66d84c633fbe8db4939d084c780190eca11f6920807dfb8ebac59e2676952cd8f2008d9c86ae8cf0463ea5fd12c5cff09ef2ce7d51ee6b420a5eb4d177 + checksum: 
10c0/7945d5ab10c8bbed3ca383d4274687ea825aee4ab93a9c51c6e31e1365edd5ea807f6908f800ba017b66c462944ba68011164e7055207747ab651f8111ef3770 languageName: node linkType: hard @@ -12162,7 +12375,7 @@ __metadata: jsesc: "npm:~0.5.0" bin: regjsparser: bin/parser - checksum: be7757ef76e1db10bf6996001d1021048b5fb12f5cb470a99b8cf7f3ff943f0f0e2291c0dcdbb418b458ddc4ac10e48680a822b69ef487a0284c8b6b77beddc3 + checksum: 10c0/fe44fcf19a99fe4f92809b0b6179530e5ef313ff7f87df143b08ce9a2eb3c4b6189b43735d645be6e8f4033bfb015ed1ca54f0583bc7561bed53fd379feb8225 languageName: node linkType: hard @@ -12175,7 +12388,7 @@ __metadata: mdast-util-definitions: "npm:^4.0.0" space-separated-tokens: "npm:^1.0.0" unist-util-visit: "npm:^2.0.0" - checksum: 48c4a41fe38916f79febb390b0c4deefe82b554dd36dc534262d851860d17fb6d15d78d515f29194e5fa48db5f01f4405a6f6dd077aaf32812a2efffb01700d7 + checksum: 10c0/5f0affc97e18ad3247e3b29449f4df98be5a75950cf0f0f13dd1755c4ef1065f9ab44626bba34d913d32bb92afd6f06a8e2f8068e83b48337f0b7a5d1f0cecfe languageName: node linkType: hard @@ -12186,56 +12399,56 @@ __metadata: github-slugger: "npm:^1.0.0" mdast-util-to-string: "npm:^1.0.0" unist-util-visit: "npm:^2.0.0" - checksum: 8c90815a0f1f0568450e923391de0183205e18befb7a7e19e111c75ad08cabf7daebe62fccc82b6fbf9f54148dd311b87463632299dbf9fdfe412f6a0a9ab3ea + checksum: 10c0/7cc2857936fce9c9c00b9c7d70de46d594cedf93bd8560fd006164dee7aacccdf472654ee35b33f4fb4bd0af882d89998c6d0c9088c2e95702a9fc15ebae002a languageName: node linkType: hard "remove-trailing-separator@npm:^1.0.1": version: 1.1.0 resolution: "remove-trailing-separator@npm:1.1.0" - checksum: d3c20b5a2d987db13e1cca9385d56ecfa1641bae143b620835ac02a6b70ab88f68f117a0021838db826c57b31373d609d52e4f31aca75fc490c862732d595419 + checksum: 10c0/3568f9f8f5af3737b4aee9e6e1e8ec4be65a92da9cb27f989e0893714d50aa95ed2ff02d40d1fa35e1b1a234dc9c2437050ef356704a3999feaca6667d9e9bfc languageName: node linkType: hard "replace-ext@npm:^1.0.0": version: 1.0.1 resolution: "replace-ext@npm:1.0.1" - 
checksum: 4994ea1aaa3d32d152a8d98ff638988812c4fa35ba55485630008fe6f49e3384a8a710878e6fd7304b42b38d1b64c1cd070e78ece411f327735581a79dd88571 + checksum: 10c0/9a9c3d68d0d31f20533ed23e9f6990cff8320cf357eebfa56c0d7b63746ae9f2d6267f3321e80e0bffcad854f710fc9a48dbcf7615579d767db69e9cd4a43168 languageName: node linkType: hard "require-directory@npm:^2.1.1": version: 2.1.1 resolution: "require-directory@npm:2.1.1" - checksum: a72468e2589270d91f06c7d36ec97a88db53ae5d6fe3787fadc943f0b0276b10347f89b363b2a82285f650bdcc135ad4a257c61bdd4d00d6df1fa24875b0ddaf + checksum: 10c0/83aa76a7bc1531f68d92c75a2ca2f54f1b01463cb566cf3fbc787d0de8be30c9dbc211d1d46be3497dac5785fe296f2dd11d531945ac29730643357978966e99 languageName: node linkType: hard "require-from-string@npm:^2.0.2": version: 2.0.2 resolution: "require-from-string@npm:2.0.2" - checksum: 839a3a890102a658f4cb3e7b2aa13a1f80a3a976b512020c3d1efc418491c48a886b6e481ea56afc6c4cb5eef678f23b2a4e70575e7534eccadf5e30ed2e56eb + checksum: 10c0/aaa267e0c5b022fc5fd4eef49d8285086b15f2a1c54b28240fdf03599cbd9c26049fee3eab894f2e1f6ca65e513b030a7c264201e3f005601e80c49fb2937ce2 languageName: node linkType: hard "requireindex@npm:^1.2.0": version: 1.2.0 resolution: "requireindex@npm:1.2.0" - checksum: 266d1cb31f6cbc4b6cf2e898f5bbc45581f7919bcf61bba5c45d0adb69b722b9ff5a13727be3350cde4520d7cd37f39df45d58a29854baaa4552cd6b05ae4a1a + checksum: 10c0/7fb42aed73bf8de9acc4d6716cf07acc7fbe180e58729433bafcf702e76e7bb10e54f8266c06bfec62d752e0ac14d50e8758833de539e6f4e2cd642077866153 languageName: node linkType: hard "requires-port@npm:^1.0.0": version: 1.0.0 resolution: "requires-port@npm:1.0.0" - checksum: 878880ee78ccdce372784f62f52a272048e2d0827c29ae31e7f99da18b62a2b9463ea03a75f277352f4697c100183debb0532371ad515a2d49d4bfe596dd4c20 + checksum: 10c0/b2bfdd09db16c082c4326e573a82c0771daaf7b53b9ce8ad60ea46aa6e30aaf475fe9b164800b89f93b748d2c234d8abff945d2551ba47bf5698e04cd7713267 languageName: node linkType: hard "reselect@npm:^4.1.8": version: 4.1.8 resolution: 
"reselect@npm:4.1.8" - checksum: 199984d9872f71cd207f4aa6e6fd2bd48d95154f7aa9b3aee3398335f39f5491059e732f28c12e9031d5d434adab2c458dc8af5afb6564d0ad37e1644445e09c + checksum: 10c0/06a305a504affcbb67dd0561ddc8306b35796199c7e15b38934c80606938a021eadcf68cfd58e7bb5e17786601c37602a3362a4665c7bf0a96c1041ceee9d0b7 languageName: node linkType: hard @@ -12244,28 +12457,28 @@ __metadata: resolution: "resolve-cwd@npm:3.0.0" dependencies: resolve-from: "npm:^5.0.0" - checksum: 546e0816012d65778e580ad62b29e975a642989108d9a3c5beabfb2304192fa3c9f9146fbdfe213563c6ff51975ae41bac1d3c6e047dd9572c94863a057b4d81 + checksum: 10c0/e608a3ebd15356264653c32d7ecbc8fd702f94c6703ea4ac2fb81d9c359180cba0ae2e6b71faa446631ed6145454d5a56b227efc33a2d40638ac13f8beb20ee4 languageName: node linkType: hard "resolve-from@npm:^4.0.0": version: 4.0.0 resolution: "resolve-from@npm:4.0.0" - checksum: 91eb76ce83621eea7bbdd9b55121a5c1c4a39e54a9ce04a9ad4517f102f8b5131c2cf07622c738a6683991bf54f2ce178f5a42803ecbd527ddc5105f362cc9e3 + checksum: 10c0/8408eec31a3112ef96e3746c37be7d64020cda07c03a920f5024e77290a218ea758b26ca9529fd7b1ad283947f34b2291c1c0f6aa0ed34acfdda9c6014c8d190 languageName: node linkType: hard "resolve-from@npm:^5.0.0": version: 5.0.0 resolution: "resolve-from@npm:5.0.0" - checksum: be18a5e4d76dd711778664829841cde690971d02b6cbae277735a09c1c28f407b99ef6ef3cd585a1e6546d4097b28df40ed32c4a287b9699dcf6d7f208495e23 + checksum: 10c0/b21cb7f1fb746de8107b9febab60095187781137fd803e6a59a76d421444b1531b641bba5857f5dc011974d8a5c635d61cec49e6bd3b7fc20e01f0fafc4efbf2 languageName: node linkType: hard "resolve.exports@npm:^2.0.0": version: 2.0.2 resolution: "resolve.exports@npm:2.0.2" - checksum: f1cc0b6680f9a7e0345d783e0547f2a5110d8336b3c2a4227231dd007271ffd331fd722df934f017af90bae0373920ca0d4005da6f76cb3176c8ae426370f893 + checksum: 10c0/cc4cffdc25447cf34730f388dca5021156ba9302a3bad3d7f168e790dc74b2827dff603f1bc6ad3d299bac269828dca96dd77e036dc9fba6a2a1807c47ab5c98 languageName: node linkType: hard @@ -12278,7 
+12491,7 @@ __metadata: supports-preserve-symlinks-flag: "npm:^1.0.0" bin: resolve: bin/resolve - checksum: c473506ee01eb45cbcfefb68652ae5759e092e6b0fb64547feadf9736a6394f258fbc6f88e00c5ca36d5477fbb65388b272432a3600fa223062e54333c156753 + checksum: 10c0/07e179f4375e1fd072cfb72ad66d78547f86e6196c4014b31cb0b8bb1db5f7ca871f922d08da0fbc05b94e9fd42206f819648fa3b5b873ebbc8e1dc68fec433a languageName: node linkType: hard @@ -12291,7 +12504,7 @@ __metadata: supports-preserve-symlinks-flag: "npm:^1.0.0" bin: resolve: bin/resolve - checksum: 2d6fd28699f901744368e6f2032b4268b4c7b9185fd8beb64f68c93ac6b22e52ae13560ceefc96241a665b985edf9ffd393ae26d2946a7d3a07b7007b7d51e79 + checksum: 10c0/a6c33555e3482ea2ec4c6e3d3bf0d78128abf69dca99ae468e64f1e30acaa318fd267fb66c8836b04d558d3e2d6ed875fe388067e7d8e0de647d3c21af21c43a languageName: node linkType: hard @@ -12304,7 +12517,7 @@ __metadata: supports-preserve-symlinks-flag: "npm:^1.0.0" bin: resolve: bin/resolve - checksum: f345cd37f56a2c0275e3fe062517c650bb673815d885e7507566df589375d165bbbf4bdb6aa95600a9bc55f4744b81f452b5a63f95b9f10a72787dba3c90890a + checksum: 10c0/0446f024439cd2e50c6c8fa8ba77eaa8370b4180f401a96abf3d1ebc770ac51c1955e12764cde449fde3fff480a61f84388e3505ecdbab778f4bef5f8212c729 languageName: node linkType: hard @@ -12317,7 +12530,7 @@ __metadata: supports-preserve-symlinks-flag: "npm:^1.0.0" bin: resolve: bin/resolve - checksum: 05fa778de9d0347c8b889eb7a18f1f06bf0f801b0eb4610b4871a4b2f22e220900cf0ad525e94f990bb8d8921c07754ab2122c0c225ab4cdcea98f36e64fa4c2 + checksum: 10c0/78ad6edb8309a2bfb720c2c1898f7907a37f858866ce11a5974643af1203a6a6e05b2fa9c53d8064a673a447b83d42569260c306d43628bff5bb101969708355 languageName: node linkType: hard @@ -12327,21 +12540,38 @@ __metadata: dependencies: onetime: "npm:^5.1.0" signal-exit: "npm:^3.0.2" - checksum: f877dd8741796b909f2a82454ec111afb84eb45890eb49ac947d87991379406b3b83ff9673a46012fca0d7844bb989f45cc5b788254cf1a39b6b5a9659de0630 + checksum: 
10c0/8051a371d6aa67ff21625fa94e2357bd81ffdc96267f3fb0fc4aaf4534028343836548ef34c240ffa8c25b280ca35eb36be00b3cb2133fa4f51896d7e73c6b4f + languageName: node + linkType: hard + +"restore-cursor@npm:^5.0.0": + version: 5.1.0 + resolution: "restore-cursor@npm:5.1.0" + dependencies: + onetime: "npm:^7.0.0" + signal-exit: "npm:^4.1.0" + checksum: 10c0/c2ba89131eea791d1b25205bdfdc86699767e2b88dee2a590b1a6caa51737deac8bad0260a5ded2f7c074b7db2f3a626bcf1fcf3cdf35974cbeea5e2e6764f60 languageName: node linkType: hard "retry@npm:^0.12.0": version: 0.12.0 resolution: "retry@npm:0.12.0" - checksum: 1f914879f97e7ee931ad05fe3afa629bd55270fc6cf1c1e589b6a99fab96d15daad0fa1a52a00c729ec0078045fe3e399bd4fd0c93bcc906957bdc17f89cb8e6 + checksum: 10c0/59933e8501727ba13ad73ef4a04d5280b3717fd650408460c987392efe9d7be2040778ed8ebe933c5cbd63da3dcc37919c141ef8af0a54a6e4fca5a2af177bfe languageName: node linkType: hard "reusify@npm:^1.0.4": version: 1.0.4 resolution: "reusify@npm:1.0.4" - checksum: 14222c9e1d3f9ae01480c50d96057228a8524706db79cdeb5a2ce5bb7070dd9f409a6f84a02cbef8cdc80d39aef86f2dd03d155188a1300c599b05437dcd2ffb + checksum: 10c0/c19ef26e4e188f408922c46f7ff480d38e8dfc55d448310dfb518736b23ed2c4f547fb64a6ed5bdba92cd7e7ddc889d36ff78f794816d5e71498d645ef476107 + languageName: node + linkType: hard + +"rfdc@npm:^1.4.1": + version: 1.4.1 + resolution: "rfdc@npm:1.4.1" + checksum: 10c0/4614e4292356cafade0b6031527eea9bc90f2372a22c012313be1dcc69a3b90c7338158b414539be863fa95bfcb2ddcd0587be696841af4e6679d85e62c060c7 languageName: node linkType: hard @@ -12352,7 +12582,7 @@ __metadata: glob: "npm:^7.1.3" bin: rimraf: ./bin.js - checksum: 4586c296c736483e297da7cffd19475e4a3e41d07b1ae124aad5d687c79e4ffa716bdac8732ed1db942caf65271cee9dd39f8b639611de161a2753e2112ffe1d + checksum: 10c0/4eef73d406c6940927479a3a9dee551e14a54faf54b31ef861250ac815172bade86cc6f7d64a4dc5e98b65e4b18a2e1c9ff3b68d296be0c748413f092bb0dd40 languageName: node linkType: hard @@ -12363,7 +12593,7 @@ __metadata: glob: "npm:^7.1.3" 
bin: rimraf: bin.js - checksum: 063ffaccaaaca2cfd0ef3beafb12d6a03dd7ff1260d752d62a6077b5dfff6ae81bea571f655bb6b589d366930ec1bdd285d40d560c0dae9b12f125e54eb743d5 + checksum: 10c0/9cb7757acb489bd83757ba1a274ab545eafd75598a9d817e0c3f8b164238dd90eba50d6b848bd4dcc5f3040912e882dc7ba71653e35af660d77b25c381d402e8 languageName: node linkType: hard @@ -12374,7 +12604,7 @@ __metadata: glob: "npm:^7.1.3" bin: rimraf: ./bin.js - checksum: 756419f2fa99aa119c46a9fc03e09d84ecf5421a80a72d1944c5088c9e4671e77128527a900a313ed9d3fdbdd37e2ae05486cd7e9116d5812d8c31f2399d7c86 + checksum: 10c0/f1e646f8c567795f2916aef7aadf685b543da6b9a53e482bb04b07472c7eef2b476045ba1e29f401c301c66b630b22b815ab31fdd60c5e1ae6566ff523debf45 languageName: node linkType: hard @@ -12388,7 +12618,7 @@ __metadata: optional: true bin: rollup: dist/bin/rollup - checksum: 9e39d54e23731a4c4067e9c02910cdf7479a0f9a7584796e2dc6efaa34bb1e5e015c062c87d1e64d96038baca76cefd47681ff22604fae5827147f54123dc6d0 + checksum: 10c0/65eddf84bf389ea8e4d4c1614b1c6a298d08f8ae785c0c087e723a879190c8aaddbab4aa3b8a0524551b9036750c9f8bfea27b377798accfd2ba5084ceff5aaa languageName: node linkType: hard @@ -12402,7 +12632,7 @@ __metadata: optional: true bin: rollup: dist/bin/rollup - checksum: df087b701304432f30922bbee5f534ab189aa6938bd383b5686c03147e0d00cd1789ea10a462361326ce6b6ebe448ce272ad3f3cc40b82eeb3157df12f33663c + checksum: 10c0/421418687f5dcd7324f4387f203c6bfc7118b7ace789e30f5da022471c43e037a76f5fd93837052754eeeae798a4fb266ac05ccee1e594406d912a59af98dde9 languageName: node linkType: hard @@ -12411,7 +12641,7 @@ __metadata: resolution: "run-parallel@npm:1.2.0" dependencies: queue-microtask: "npm:^1.2.2" - checksum: cb4f97ad25a75ebc11a8ef4e33bb962f8af8516bb2001082ceabd8902e15b98f4b84b4f8a9b222e5d57fc3bd1379c483886ed4619367a7680dad65316993021d + checksum: 10c0/200b5ab25b5b8b7113f9901bfe3afc347e19bb7475b267d55ad0eb86a62a46d77510cb0f232507c9e5d497ebda569a08a9867d0d14f57a82ad5564d991588b39 languageName: node linkType: hard @@ -12423,21 
+12653,21 @@ __metadata: get-intrinsic: "npm:^1.2.2" has-symbols: "npm:^1.0.3" isarray: "npm:^2.0.5" - checksum: 41ac35ce46c44e2e8637b1805b0697d5269507779e3082b7afb92c01605fd73ab813bbc799510c56e300cfc941b1447fd98a338205db52db7fd1322ab32d7c9f + checksum: 10c0/833d3d950fc7507a60075f9bfaf41ec6dac7c50c7a9d62b1e6b071ecc162185881f92e594ff95c1a18301c881352dd6fd236d56999d5819559db7b92da9c28af languageName: node linkType: hard "safe-buffer@npm:5.1.2, safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": version: 5.1.2 resolution: "safe-buffer@npm:5.1.2" - checksum: 7eb5b48f2ed9a594a4795677d5a150faa7eb54483b2318b568dc0c4fc94092a6cce5be02c7288a0500a156282f5276d5688bce7259299568d1053b2150ef374a + checksum: 10c0/780ba6b5d99cc9a40f7b951d47152297d0e260f0df01472a1b99d4889679a4b94a13d644f7dbc4f022572f09ae9005fa2fbb93bbbd83643316f365a3e9a45b21 languageName: node linkType: hard "safe-buffer@npm:5.2.1, safe-buffer@npm:~5.2.0": version: 5.2.1 resolution: "safe-buffer@npm:5.2.1" - checksum: 32872cd0ff68a3ddade7a7617b8f4c2ae8764d8b7d884c651b74457967a9e0e886267d3ecc781220629c44a865167b61c375d2da6c720c840ecd73f45d5d9451 + checksum: 10c0/6501914237c0a86e9675d4e51d89ca3c21ffd6a31642efeba25ad65720bce6921c9e7e974e5be91a786b25aa058b5303285d3c15dbabf983a919f5f630d349f3 languageName: node linkType: hard @@ -12448,21 +12678,21 @@ __metadata: call-bind: "npm:^1.0.6" es-errors: "npm:^1.3.0" is-regex: "npm:^1.1.4" - checksum: b04de61114b10274d92e25b6de7ccb5de07f11ea15637ff636de4b5190c0f5cd8823fe586dde718504cf78055437d70fd8804976894df502fcf5a210c970afb3 + checksum: 10c0/900bf7c98dc58f08d8523b7012b468e4eb757afa624f198902c0643d7008ba777b0bdc35810ba0b758671ce887617295fb742b3f3968991b178ceca54cb07603 languageName: node linkType: hard "safe-stable-stringify@npm:^2.3.1": version: 2.4.3 resolution: "safe-stable-stringify@npm:2.4.3" - checksum: a6c192bbefe47770a11072b51b500ed29be7b1c15095371c1ee1dc13e45ce48ee3c80330214c56764d006c485b88bd0b24940d868948170dddc16eed312582d8 + checksum: 
10c0/81dede06b8f2ae794efd868b1e281e3c9000e57b39801c6c162267eb9efda17bd7a9eafa7379e1f1cacd528d4ced7c80d7460ad26f62ada7c9e01dec61b2e768 languageName: node linkType: hard "safer-buffer@npm:>= 2.1.2 < 3, safer-buffer@npm:>= 2.1.2 < 3.0.0": version: 2.1.2 resolution: "safer-buffer@npm:2.1.2" - checksum: 7eaf7a0cf37cc27b42fb3ef6a9b1df6e93a1c6d98c6c6702b02fe262d5fcbd89db63320793b99b21cb5348097d0a53de81bd5f4e8b86e20cc9412e3f1cfb4e83 + checksum: 10c0/7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4 languageName: node linkType: hard @@ -12475,7 +12705,7 @@ __metadata: source-map-js: "npm:>=0.6.2 <2.0.0" bin: sass: sass.js - checksum: 51e3c667e262a80db9c80f31109dabd8d5b9a6f79e8e8aa627d83564607036ee0b13b1921d14fd317437d8cf7030d7c8cf1c3b7e11b1f4537a4a4029f6cb63a3 + checksum: 10c0/59d79a6e106747746792b0c71908ae0aecdaf9b794d5724ee64e5249412f0d8ebe7ee2bf12946618848f14f949c4f6b530d82da3e62ab31c71198c6f73002130 languageName: node linkType: hard @@ -12484,7 +12714,7 @@ __metadata: resolution: "saxes@npm:6.0.0" dependencies: xmlchars: "npm:^2.2.0" - checksum: 97b50daf6ca3a153e89842efa18a862e446248296622b7473c169c84c823ee8a16e4a43bac2f73f11fc8cb9168c73fbb0d73340f26552bac17970e9052367aa9 + checksum: 10c0/3847b839f060ef3476eb8623d099aa502ad658f5c40fd60c105ebce86d244389b0d76fcae30f4d0c728d7705ceb2f7e9b34bb54717b6a7dbedaf5dad2d9a4b74 languageName: node linkType: hard @@ -12493,7 +12723,7 @@ __metadata: resolution: "scheduler@npm:0.23.0" dependencies: loose-envify: "npm:^1.1.0" - checksum: 0c4557aa37bafca44ff21dc0ea7c92e2dbcb298bc62eae92b29a39b029134f02fb23917d6ebc8b1fa536b4184934314c20d8864d156a9f6357f3398aaf7bfda8 + checksum: 10c0/b777f7ca0115e6d93e126ac490dbd82642d14983b3079f58f35519d992fa46260be7d6e6cede433a92db70306310c6f5f06e144f0e40c484199e09c1f7be53dd languageName: node linkType: hard @@ -12502,7 +12732,7 @@ __metadata: resolution: "semver@npm:5.7.2" bin: semver: bin/semver - checksum: 
fca14418a174d4b4ef1fecb32c5941e3412d52a4d3d85165924ce3a47fbc7073372c26faf7484ceb4bbc2bde25880c6b97e492473dc7e9708fdfb1c6a02d546e + checksum: 10c0/e4cf10f86f168db772ae95d86ba65b3fd6c5967c94d97c708ccb463b778c2ee53b914cd7167620950fc07faf5a564e6efe903836639e512a1aa15fbc9667fa25 languageName: node linkType: hard @@ -12511,7 +12741,7 @@ __metadata: resolution: "semver@npm:6.3.1" bin: semver: bin/semver.js - checksum: 1ef3a85bd02a760c6ef76a45b8c1ce18226de40831e02a00bad78485390b98b6ccaa31046245fc63bba4a47a6a592b6c7eedc65cc47126e60489f9cc1ce3ed7e + checksum: 10c0/e3d79b609071caa78bcb6ce2ad81c7966a46a7431d9d58b8800cfa9cb6a63699b3899a0e4bcce36167a284578212d9ae6942b6929ba4aa5015c079a67751d42d languageName: node linkType: hard @@ -12522,7 +12752,7 @@ __metadata: lru-cache: "npm:^6.0.0" bin: semver: bin/semver.js - checksum: 1b41018df2d8aca5a1db4729985e8e20428c650daea60fcd16e926e9383217d00f574fab92d79612771884a98d2ee2a1973f49d630829a8d54d6570defe62535 + checksum: 10c0/fbfe717094ace0aa8d6332d7ef5ce727259815bd8d8815700853f4faf23aacbd7192522f0dc5af6df52ef4fa85a355ebd2f5d39f554bd028200d6cf481ab9b53 languageName: node linkType: hard @@ -12543,7 +12773,7 @@ __metadata: on-finished: "npm:2.4.1" range-parser: "npm:~1.2.1" statuses: "npm:2.0.1" - checksum: ec66c0ad109680ad8141d507677cfd8b4e40b9559de23191871803ed241718e99026faa46c398dcfb9250676076573bd6bfe5d0ec347f88f4b7b8533d1d391cb + checksum: 10c0/0eb134d6a51fc13bbcb976a1f4214ea1e33f242fae046efc311e80aff66c7a43603e26a79d9d06670283a13000e51be6e0a2cb80ff0942eaf9f1cd30b7ae736a languageName: node linkType: hard @@ -12555,7 +12785,7 @@ __metadata: escape-html: "npm:~1.0.3" parseurl: "npm:~1.3.3" send: "npm:0.18.0" - checksum: 699b2d4c29807a51d9b5e0f24955346911437aebb0178b3c4833ad30d3eca93385ff9927254f5c16da345903cad39d9cd4a532198c95a5129cc4ed43911b15a4 + checksum: 10c0/fa9f0e21a540a28f301258dfe1e57bb4f81cd460d28f0e973860477dd4acef946a1f41748b5bd41c73b621bea2029569c935faa38578fd34cd42a9b4947088ba languageName: node linkType: hard @@ -12569,7 
+12799,7 @@ __metadata: get-intrinsic: "npm:^1.2.3" gopd: "npm:^1.0.1" has-property-descriptors: "npm:^1.0.1" - checksum: 9ab1d200149574ab27c1a7acae56d6235e02568fc68655fe8afe63e4e02ccad3c27665f55c32408bd1ff40705939dbb7539abfb9c3a07fda27ecad1ab9e449f5 + checksum: 10c0/1927e296599f2c04d210c1911f1600430a5e49e04a6d8bb03dca5487b95a574da9968813a2ced9a774bd3e188d4a6208352c8f64b8d4674cdb021dca21e190ca languageName: node linkType: hard @@ -12581,14 +12811,14 @@ __metadata: es-errors: "npm:^1.3.0" functions-have-names: "npm:^1.2.3" has-property-descriptors: "npm:^1.0.2" - checksum: c7614154a53ebf8c0428a6c40a3b0b47dac30587c1a19703d1b75f003803f73cdfa6a93474a9ba678fa565ef5fbddc2fae79bca03b7d22ab5fd5163dbe571a74 + checksum: 10c0/fce59f90696c450a8523e754abb305e2b8c73586452619c2bad5f7bf38c7b6b4651895c9db895679c5bef9554339cf3ef1c329b66ece3eda7255785fbe299316 languageName: node linkType: hard "setprototypeof@npm:1.2.0": version: 1.2.0 resolution: "setprototypeof@npm:1.2.0" - checksum: fde1630422502fbbc19e6844346778f99d449986b2f9cdcceb8326730d2f3d9964dbcb03c02aaadaefffecd0f2c063315ebea8b3ad895914bf1afc1747fc172e + checksum: 10c0/68733173026766fa0d9ecaeb07f0483f4c2dc70ca376b3b7c40b7cda909f94b0918f6c5ad5ce27a9160bdfb475efaa9d5e705a11d8eaae18f9835d20976028bc languageName: node linkType: hard @@ -12597,7 +12827,7 @@ __metadata: resolution: "shallow-clone@npm:3.0.1" dependencies: kind-of: "npm:^6.0.2" - checksum: e066bd540cfec5e1b0f78134853e0d892d1c8945fb9a926a579946052e7cb0c70ca4fc34f875a8083aa7910d751805d36ae64af250a6de6f3d28f9fa7be6c21b + checksum: 10c0/7bab09613a1b9f480c85a9823aebec533015579fa055ba6634aa56ba1f984380670eaf33b8217502931872aa1401c9fcadaa15f9f604d631536df475b05bcf1e languageName: node linkType: hard @@ -12606,14 +12836,14 @@ __metadata: resolution: "shebang-command@npm:2.0.0" dependencies: shebang-regex: "npm:^3.0.0" - checksum: 6b52fe87271c12968f6a054e60f6bde5f0f3d2db483a1e5c3e12d657c488a15474121a1d55cd958f6df026a54374ec38a4a963988c213b7570e1d51575cea7fa + checksum: 
10c0/a41692e7d89a553ef21d324a5cceb5f686d1f3c040759c50aab69688634688c5c327f26f3ecf7001ebfd78c01f3c7c0a11a7c8bfd0a8bc9f6240d4f40b224e4e languageName: node linkType: hard "shebang-regex@npm:^3.0.0": version: 3.0.0 resolution: "shebang-regex@npm:3.0.0" - checksum: 1a2bcae50de99034fcd92ad4212d8e01eedf52c7ec7830eedcf886622804fe36884278f2be8be0ea5fde3fd1c23911643a4e0f726c8685b61871c8908af01222 + checksum: 10c0/1dbed0726dd0e1152a92696c76c7f06084eb32a90f0528d11acd764043aacf76994b2fb30aa1291a21bd019d6699164d048286309a278855ee7bec06cf6fb690 languageName: node linkType: hard @@ -12625,21 +12855,21 @@ __metadata: es-errors: "npm:^1.3.0" get-intrinsic: "npm:^1.2.4" object-inspect: "npm:^1.13.1" - checksum: 27708b70b5d81bf18dc8cc23f38f1b6c9511691a64abc4aaf17956e67d132c855cf8b46f931e2fc5a6262b29371eb60da7755c1b9f4f862eccea8562b469f8f6 + checksum: 10c0/31312fecb68997ce2893b1f6d1fd07d6dd41e05cc938e82004f056f7de96dd9df599ef9418acdf730dda948e867e933114bd2efe4170c0146d1ed7009700c252 languageName: node linkType: hard "signal-exit@npm:^3.0.2, signal-exit@npm:^3.0.3, signal-exit@npm:^3.0.7": version: 3.0.7 resolution: "signal-exit@npm:3.0.7" - checksum: a2f098f247adc367dffc27845853e9959b9e88b01cb301658cfe4194352d8d2bb32e18467c786a7fe15f1d44b233ea35633d076d5e737870b7139949d1ab6318 + checksum: 10c0/25d272fa73e146048565e08f3309d5b942c1979a6f4a58a8c59d5fa299728e9c2fcd1a759ec870863b1fd38653670240cd420dad2ad9330c71f36608a6a1c912 languageName: node linkType: hard "signal-exit@npm:^4.0.1, signal-exit@npm:^4.1.0": version: 4.1.0 resolution: "signal-exit@npm:4.1.0" - checksum: c9fa63bbbd7431066174a48ba2dd9986dfd930c3a8b59de9c29d7b6854ec1c12a80d15310869ea5166d413b99f041bfa3dd80a7947bcd44ea8e6eb3ffeabfa1f + checksum: 10c0/41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83 languageName: node linkType: hard @@ -12648,28 +12878,48 @@ __metadata: resolution: "simple-swizzle@npm:0.2.2" dependencies: is-arrayish: "npm:^0.3.1" - 
checksum: c6dffff17aaa383dae7e5c056fbf10cf9855a9f79949f20ee225c04f06ddde56323600e0f3d6797e82d08d006e93761122527438ee9531620031c08c9e0d73cc + checksum: 10c0/df5e4662a8c750bdba69af4e8263c5d96fe4cd0f9fe4bdfa3cbdeb45d2e869dff640beaaeb1ef0e99db4d8d2ec92f85508c269f50c972174851bc1ae5bd64308 languageName: node linkType: hard "sisteransi@npm:^1.0.5": version: 1.0.5 resolution: "sisteransi@npm:1.0.5" - checksum: aba6438f46d2bfcef94cf112c835ab395172c75f67453fe05c340c770d3c402363018ae1ab4172a1026a90c47eaccf3af7b6ff6fa749a680c2929bd7fa2b37a4 + checksum: 10c0/230ac975cca485b7f6fe2b96a711aa62a6a26ead3e6fb8ba17c5a00d61b8bed0d7adc21f5626b70d7c33c62ff4e63933017a6462942c719d1980bb0b1207ad46 languageName: node linkType: hard "slash@npm:^3.0.0": version: 3.0.0 resolution: "slash@npm:3.0.0" - checksum: 94a93fff615f25a999ad4b83c9d5e257a7280c90a32a7cb8b4a87996e4babf322e469c42b7f649fd5796edd8687652f3fb452a86dc97a816f01113183393f11c + checksum: 10c0/e18488c6a42bdfd4ac5be85b2ced3ccd0224773baae6ad42cfbb9ec74fc07f9fa8396bd35ee638084ead7a2a0818eb5e7151111544d4731ce843019dab4be47b + languageName: node + linkType: hard + +"slice-ansi@npm:^5.0.0": + version: 5.0.0 + resolution: "slice-ansi@npm:5.0.0" + dependencies: + ansi-styles: "npm:^6.0.0" + is-fullwidth-code-point: "npm:^4.0.0" + checksum: 10c0/2d4d40b2a9d5cf4e8caae3f698fe24ae31a4d778701724f578e984dcb485ec8c49f0c04dab59c401821e80fcdfe89cace9c66693b0244e40ec485d72e543914f + languageName: node + linkType: hard + +"slice-ansi@npm:^7.1.0": + version: 7.1.0 + resolution: "slice-ansi@npm:7.1.0" + dependencies: + ansi-styles: "npm:^6.2.1" + is-fullwidth-code-point: "npm:^5.0.0" + checksum: 10c0/631c971d4abf56cf880f034d43fcc44ff883624867bf11ecbd538c47343911d734a4656d7bc02362b40b89d765652a7f935595441e519b59e2ad3f4d5d6fe7ca languageName: node linkType: hard "smart-buffer@npm:^4.2.0": version: 4.2.0 resolution: "smart-buffer@npm:4.2.0" - checksum: 
927484aa0b1640fd9473cee3e0a0bcad6fce93fd7bbc18bac9ad0c33686f5d2e2c422fba24b5899c184524af01e11dd2bd051c2bf2b07e47aff8ca72cbfc60d2 + checksum: 10c0/a16775323e1404dd43fabafe7460be13a471e021637bc7889468eb45ce6a6b207261f454e4e530a19500cc962c4cc5348583520843b363f4193cee5c00e1e539 languageName: node linkType: hard @@ -12680,7 +12930,7 @@ __metadata: agent-base: "npm:^7.0.2" debug: "npm:^4.3.4" socks: "npm:^2.7.1" - checksum: ea727734bd5b2567597aa0eda14149b3b9674bb44df5937bbb9815280c1586994de734d965e61f1dd45661183d7b41f115fb9e432d631287c9063864cfcc2ecc + checksum: 10c0/a842402fc9b8848a31367f2811ca3cd14c4106588b39a0901cd7a69029998adfc6456b0203617c18ed090542ad0c24ee4e9d4c75a0c4b75071e214227c177eb7 languageName: node linkType: hard @@ -12690,14 +12940,14 @@ __metadata: dependencies: ip-address: "npm:^9.0.5" smart-buffer: "npm:^4.2.0" - checksum: a3cc38e0716ab53a2db3fa00c703ca682ad54dbbc9ed4c7461624a999be6fa7cdc79fc904c411618e698d5eff55a55aa6d9329169a7db11636d0200814a2b5aa + checksum: 10c0/ac77b515c260473cc7c4452f09b20939e22510ce3ae48385c516d1d5784374d5cc75be3cb18ff66cc985a7f4f2ef8fef84e984c5ec70aad58355ed59241f40a8 languageName: node linkType: hard "source-map-js@npm:>=0.6.2 <2.0.0, source-map-js@npm:^1.0.2": version: 1.0.2 resolution: "source-map-js@npm:1.0.2" - checksum: 38e2d2dd18d2e331522001fc51b54127ef4a5d473f53b1349c5cca2123562400e0986648b52e9407e348eaaed53bce49248b6e2641e6d793ca57cb2c360d6d51 + checksum: 10c0/32f2dfd1e9b7168f9a9715eb1b4e21905850f3b50cf02cf476e47e4eebe8e6b762b63a64357896aa29b37e24922b4282df0f492e0d2ace572b43d15525976ff8 languageName: node linkType: hard @@ -12707,7 +12957,7 @@ __metadata: dependencies: buffer-from: "npm:^1.0.0" source-map: "npm:^0.6.0" - checksum: d1514a922ac9c7e4786037eeff6c3322f461cd25da34bb9fefb15387b3490531774e6e31d95ab6d5b84a3e139af9c3a570ccaee6b47bd7ea262691ed3a8bc34e + checksum: 10c0/137539f8c453fa0f496ea42049ab5da4569f96781f6ac8e5bfda26937be9494f4e8891f523c5f98f0e85f71b35d74127a00c46f83f6a4f54672b58d53202565e languageName: node 
linkType: hard @@ -12717,28 +12967,28 @@ __metadata: dependencies: buffer-from: "npm:^1.0.0" source-map: "npm:^0.6.0" - checksum: 8317e12d84019b31e34b86d483dd41d6f832f389f7417faf8fc5c75a66a12d9686e47f589a0554a868b8482f037e23df9d040d29387eb16fa14cb85f091ba207 + checksum: 10c0/9ee09942f415e0f721d6daad3917ec1516af746a8120bba7bb56278707a37f1eb8642bde456e98454b8a885023af81a16e646869975f06afc1a711fb90484e7d languageName: node linkType: hard "source-map@npm:^0.6.0, source-map@npm:^0.6.1, source-map@npm:~0.6.1": version: 0.6.1 resolution: "source-map@npm:0.6.1" - checksum: 59ef7462f1c29d502b3057e822cdbdae0b0e565302c4dd1a95e11e793d8d9d62006cdc10e0fd99163ca33ff2071360cf50ee13f90440806e7ed57d81cba2f7ff + checksum: 10c0/ab55398007c5e5532957cb0beee2368529618ac0ab372d789806f5718123cc4367d57de3904b4e6a4170eb5a0b0f41373066d02ca0735a0c4d75c7d328d3e011 languageName: node linkType: hard "space-separated-tokens@npm:^1.0.0": version: 1.1.5 resolution: "space-separated-tokens@npm:1.1.5" - checksum: 8ef68f1cfa8ccad316b7f8d0df0919d0f1f6d32101e8faeee34ea3a923ce8509c1ad562f57388585ee4951e92d27afa211ed0a077d3d5995b5ba9180331be708 + checksum: 10c0/3ee0a6905f89e1ffdfe474124b1ade9fe97276a377a0b01350bc079b6ec566eb5b219e26064cc5b7f3899c05bde51ffbc9154290b96eaf82916a1e2c2c13ead9 languageName: node linkType: hard "space-separated-tokens@npm:^2.0.0": version: 2.0.2 resolution: "space-separated-tokens@npm:2.0.2" - checksum: 202e97d7ca1ba0758a0aa4fe226ff98142073bcceeff2da3aad037968878552c3bbce3b3231970025375bbba5aee00c5b8206eda408da837ab2dc9c0f26be990 + checksum: 10c0/6173e1d903dca41dcab6a2deed8b4caf61bd13b6d7af8374713500570aa929ff9414ae09a0519f4f8772df993300305a395d4871f35bc4ca72b6db57e1f30af8 languageName: node linkType: hard @@ -12748,14 +12998,14 @@ __metadata: dependencies: spdx-expression-parse: "npm:^3.0.0" spdx-license-ids: "npm:^3.0.0" - checksum: cc2e4dbef822f6d12142116557d63f5facf3300e92a6bd24e907e4865e17b7e1abd0ee6b67f305cae6790fc2194175a24dc394bfcc01eea84e2bdad728e9ae9a + checksum: 
10c0/49208f008618b9119208b0dadc9208a3a55053f4fd6a0ae8116861bd22696fc50f4142a35ebfdb389e05ccf2de8ad142573fefc9e26f670522d899f7b2fe7386 languageName: node linkType: hard "spdx-exceptions@npm:^2.1.0": version: 2.5.0 resolution: "spdx-exceptions@npm:2.5.0" - checksum: bb127d6e2532de65b912f7c99fc66097cdea7d64c10d3ec9b5e96524dbbd7d20e01cba818a6ddb2ae75e62bb0c63d5e277a7e555a85cbc8ab40044984fa4ae15 + checksum: 10c0/37217b7762ee0ea0d8b7d0c29fd48b7e4dfb94096b109d6255b589c561f57da93bf4e328c0290046115961b9209a8051ad9f525e48d433082fc79f496a4ea940 languageName: node linkType: hard @@ -12765,28 +13015,28 @@ __metadata: dependencies: spdx-exceptions: "npm:^2.1.0" spdx-license-ids: "npm:^3.0.0" - checksum: a1c6e104a2cbada7a593eaa9f430bd5e148ef5290d4c0409899855ce8b1c39652bcc88a725259491a82601159d6dc790bedefc9016c7472f7de8de7361f8ccde + checksum: 10c0/6f8a41c87759fa184a58713b86c6a8b028250f158159f1d03ed9d1b6ee4d9eefdc74181c8ddc581a341aa971c3e7b79e30b59c23b05d2436d5de1c30bdef7171 languageName: node linkType: hard "spdx-license-ids@npm:^3.0.0": version: 3.0.17 resolution: "spdx-license-ids@npm:3.0.17" - checksum: 8f6c6ae02ebb25b4ca658b8990d9e8a8f8d8a95e1d8b9fd84d87eed80a7dc8f8073d6a8d50b8a0295c0e8399e1f8814f5c00e2985e6bf3731540a16f7241cbf1 + checksum: 10c0/ddf9477b5afc70f1a7d3bf91f0b8e8a1c1b0fa65d2d9a8b5c991b1a2ba91b693d8b9749700119d5ce7f3fbf307ac421087ff43d321db472605e98a5804f80eac languageName: node linkType: hard "sprintf-js@npm:^1.1.3": version: 1.1.3 resolution: "sprintf-js@npm:1.1.3" - checksum: e7587128c423f7e43cc625fe2f87e6affdf5ca51c1cc468e910d8aaca46bb44a7fbcfa552f787b1d3987f7043aeb4527d1b99559e6621e01b42b3f45e5a24cbb + checksum: 10c0/09270dc4f30d479e666aee820eacd9e464215cdff53848b443964202bf4051490538e5dd1b42e1a65cf7296916ca17640aebf63dae9812749c7542ee5f288dec languageName: node linkType: hard "sprintf-js@npm:~1.0.2": version: 1.0.3 resolution: "sprintf-js@npm:1.0.3" - checksum: 
c34828732ab8509c2741e5fd1af6b767c3daf2c642f267788f933a65b1614943c282e74c4284f4fa749c264b18ee016a0d37a3e5b73aee446da46277d3a85daa + checksum: 10c0/ecadcfe4c771890140da5023d43e190b7566d9cf8b2d238600f31bec0fc653f328da4450eb04bd59a431771a8e9cc0e118f0aa3974b683a4981b4e07abc2a5bb languageName: node linkType: hard @@ -12795,21 +13045,21 @@ __metadata: resolution: "ssri@npm:10.0.5" dependencies: minipass: "npm:^7.0.3" - checksum: 453f9a1c241c13f5dfceca2ab7b4687bcff354c3ccbc932f35452687b9ef0ccf8983fd13b8a3baa5844c1a4882d6e3ddff48b0e7fd21d743809ef33b80616d79 + checksum: 10c0/b091f2ae92474183c7ac5ed3f9811457e1df23df7a7e70c9476eaa9a0c4a0c8fc190fb45acefbf023ca9ee864dd6754237a697dc52a0fb182afe65d8e77443d8 languageName: node linkType: hard "stable@npm:^0.1.8": version: 0.1.8 resolution: "stable@npm:0.1.8" - checksum: 2ff482bb100285d16dd75cd8f7c60ab652570e8952c0bfa91828a2b5f646a0ff533f14596ea4eabd48bb7f4aeea408dce8f8515812b975d958a4cc4fa6b9dfeb + checksum: 10c0/df74b5883075076e78f8e365e4068ecd977af6c09da510cfc3148a303d4b87bc9aa8f7c48feb67ed4ef970b6140bd9eabba2129e28024aa88df5ea0114cba39d languageName: node linkType: hard "stack-trace@npm:0.0.x": version: 0.0.10 resolution: "stack-trace@npm:0.0.10" - checksum: 7bd633f0e9ac46e81a0b0fe6538482c1d77031959cf94478228731709db4672fbbed59176f5b9a9fd89fec656b5dae03d084ef2d1b0c4c2f5683e05f2dbb1405 + checksum: 10c0/9ff3dabfad4049b635a85456f927a075c9d0c210e3ea336412d18220b2a86cbb9b13ec46d6c37b70a302a4ea4d49e30e5d4944dd60ae784073f1cde778ac8f4b languageName: node linkType: hard @@ -12818,14 +13068,14 @@ __metadata: resolution: "stack-utils@npm:2.0.6" dependencies: escape-string-regexp: "npm:^2.0.0" - checksum: cdc988acbc99075b4b036ac6014e5f1e9afa7e564482b687da6384eee6a1909d7eaffde85b0a17ffbe186c5247faf6c2b7544e802109f63b72c7be69b13151bb + checksum: 10c0/651c9f87667e077584bbe848acaecc6049bc71979f1e9a46c7b920cad4431c388df0f51b8ad7cfd6eed3db97a2878d0fc8b3122979439ea8bac29c61c95eec8a languageName: node linkType: hard "statuses@npm:2.0.1": version: 
2.0.1 resolution: "statuses@npm:2.0.1" - checksum: 18c7623fdb8f646fb213ca4051be4df7efb3484d4ab662937ca6fbef7ced9b9e12842709872eb3020cc3504b93bde88935c9f6417489627a7786f24f8031cbcb + checksum: 10c0/34378b207a1620a24804ce8b5d230fea0c279f00b18a7209646d5d47e419d1cc23e7cbf33a25a1e51ac38973dc2ac2e1e9c647a8e481ef365f77668d72becfd0 languageName: node linkType: hard @@ -12834,14 +13084,14 @@ __metadata: resolution: "stop-iteration-iterator@npm:1.0.0" dependencies: internal-slot: "npm:^1.0.4" - checksum: 2a23a36f4f6bfa63f46ae2d53a3f80fe8276110b95a55345d8ed3d92125413494033bc8697eb774e8f7aeb5725f70e3d69753caa2ecacdac6258c16fa8aa8b0f + checksum: 10c0/c4158d6188aac510d9e92925b58709207bd94699e9c31186a040c80932a687f84a51356b5895e6dc72710aad83addb9411c22171832c9ae0e6e11b7d61b0dfb9 languageName: node linkType: hard "store2@npm:^2.14.2": version: 2.14.3 resolution: "store2@npm:2.14.3" - checksum: f95f6fbacff14cc3bb9e5e16ced2f29e2d706e30b248c16cf19abed8b2bb31d8f3907c8ccf1a5284d806fdcaf06e96710e4f4f52195e51522a452536beaf7af9 + checksum: 10c0/22e1096e6d69590672ca0b7f891d82b060837ef4c3e5df0d4563e6cbed14c52ddf2589fa94b79f4311b6ec41d95d6142e5d01d194539e0175c3fb4090cca8244 languageName: node linkType: hard @@ -12853,14 +13103,21 @@ __metadata: bin: sb: ./index.js storybook: ./index.js - checksum: 8809d72714322b32c4b03724b6dd4e01d0a282e7b8dca590bfc89e79e34c6bb82869d9dbb6c6c6c39381144e508f6b74ff8a605480fb4d914e30d5116a51f5ef + checksum: 10c0/256b8ff26b69f622889488605e786c0742350a901037139dd469ec20f2e7031c326d65f2a202a5ee7baa407ff407a6746af2f01d91c0c617eda2013679a65271 languageName: node linkType: hard "stream-shift@npm:^1.0.0": version: 1.0.3 resolution: "stream-shift@npm:1.0.3" - checksum: a24c0a3f66a8f9024bd1d579a533a53be283b4475d4e6b4b3211b964031447bdf6532dd1f3c2b0ad66752554391b7c62bd7ca4559193381f766534e723d50242 + checksum: 10c0/939cd1051ca750d240a0625b106a2b988c45fb5a3be0cebe9a9858cb01bc1955e8c7b9fac17a9462976bea4a7b704e317c5c2200c70f0ca715a3363b9aa4fd3b + languageName: node + linkType: 
hard + +"string-argv@npm:~0.3.2": + version: 0.3.2 + resolution: "string-argv@npm:0.3.2" + checksum: 10c0/75c02a83759ad1722e040b86823909d9a2fc75d15dd71ec4b537c3560746e33b5f5a07f7332d1e3f88319909f82190843aa2f0a0d8c8d591ec08e93d5b8dec82 languageName: node linkType: hard @@ -12870,7 +13127,7 @@ __metadata: dependencies: char-regex: "npm:^1.0.2" strip-ansi: "npm:^6.0.0" - checksum: ce85533ef5113fcb7e522bcf9e62cb33871aa99b3729cec5595f4447f660b0cefd542ca6df4150c97a677d58b0cb727a3fe09ac1de94071d05526c73579bf505 + checksum: 10c0/1cd77409c3d7db7bc59406f6bcc9ef0783671dcbabb23597a1177c166906ef2ee7c8290f78cae73a8aec858768f189d2cb417797df5e15ec4eb5e16b3346340c languageName: node linkType: hard @@ -12881,7 +13138,7 @@ __metadata: emoji-regex: "npm:^8.0.0" is-fullwidth-code-point: "npm:^3.0.0" strip-ansi: "npm:^6.0.1" - checksum: e52c10dc3fbfcd6c3a15f159f54a90024241d0f149cf8aed2982a2d801d2e64df0bf1dc351cf8e95c3319323f9f220c16e740b06faecd53e2462df1d2b5443fb + checksum: 10c0/1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b languageName: node linkType: hard @@ -12892,7 +13149,18 @@ __metadata: eastasianwidth: "npm:^0.2.0" emoji-regex: "npm:^9.2.2" strip-ansi: "npm:^7.0.1" - checksum: 7369deaa29f21dda9a438686154b62c2c5f661f8dda60449088f9f980196f7908fc39fdd1803e3e01541970287cf5deae336798337e9319a7055af89dafa7193 + checksum: 10c0/ab9c4264443d35b8b923cbdd513a089a60de339216d3b0ed3be3ba57d6880e1a192b70ae17225f764d7adbf5994e9bb8df253a944736c15a0240eff553c678ca + languageName: node + linkType: hard + +"string-width@npm:^7.0.0": + version: 7.2.0 + resolution: "string-width@npm:7.2.0" + dependencies: + emoji-regex: "npm:^10.3.0" + get-east-asian-width: "npm:^1.0.0" + strip-ansi: "npm:^7.1.0" + checksum: 10c0/eb0430dd43f3199c7a46dcbf7a0b34539c76fe3aa62763d0b0655acdcbdf360b3f66f3d58ca25ba0205f42ea3491fa00f09426d3b7d3040e506878fc7664c9b9 languageName: node linkType: hard @@ -12909,7 +13177,7 @@ __metadata: 
regexp.prototype.flags: "npm:^1.5.0" set-function-name: "npm:^2.0.0" side-channel: "npm:^1.0.4" - checksum: 0f7a1a7f91790cd45f804039a16bc6389c8f4f25903e648caa3eea080b019a5c7b0cac2ca83976646140c2332b159042140bf389f23675609d869dd52450cddc + checksum: 10c0/cd7495fb0de16d43efeee3887b98701941f3817bd5f09351ad1825b023d307720c86394d56d56380563d97767ab25bf5448db239fcecbb85c28e2180f23e324a languageName: node linkType: hard @@ -12920,7 +13188,7 @@ __metadata: call-bind: "npm:^1.0.2" define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 9301f6cb2b6c44f069adde1b50f4048915985170a20a1d64cf7cb2dc53c5cd6b9525b92431f1257f894f94892d6c4ae19b5aa7f577c3589e7e51772dffc9d5a4 + checksum: 10c0/4f76c583908bcde9a71208ddff38f67f24c9ec8093631601666a0df8b52fad44dad2368c78895ce83eb2ae8e7068294cc96a02fc971ab234e4d5c9bb61ea4e34 languageName: node linkType: hard @@ -12931,7 +13199,7 @@ __metadata: call-bind: "npm:^1.0.2" define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 3f0d3397ab9bd95cd98ae2fe0943bd3e7b63d333c2ab88f1875cf2e7c958c75dc3355f6fe19ee7c8fca28de6f39f2475e955e103821feb41299a2764a7463ffa + checksum: 10c0/53c24911c7c4d8d65f5ef5322de23a3d5b6b4db73273e05871d5ab4571ae5638f38f7f19d71d09116578fb060e5a145cc6a208af2d248c8baf7a34f44d32ce57 languageName: node linkType: hard @@ -12942,7 +13210,7 @@ __metadata: call-bind: "npm:^1.0.2" define-properties: "npm:^1.2.0" es-abstract: "npm:^1.22.1" - checksum: 6e594d3a61b127d243b8be1312e9f78683abe452cfe0bcafa3e0dc62ad6f030ccfb64d87ed3086fb7cb540fda62442c164d237cc5cc4d53c6e3eb659c29a0aeb + checksum: 10c0/0bcf391b41ea16d4fda9c9953d0a7075171fe090d33b4cf64849af94944c50862995672ac03e0c5dba2940a213ad7f53515a668dac859ce22a0276289ae5cf4f languageName: node linkType: hard @@ -12951,7 +13219,7 @@ __metadata: resolution: "string_decoder@npm:1.3.0" dependencies: safe-buffer: "npm:~5.2.0" - checksum: 54d23f4a6acae0e93f999a585e673be9e561b65cd4cca37714af1e893ab8cd8dfa52a9e4f58f48f87b4a44918d3a9254326cb80ed194bf2e4c226e2b21767e56 + 
checksum: 10c0/810614ddb030e271cd591935dcd5956b2410dd079d64ff92a1844d6b7588bf992b3e1b69b0f4d34a3e06e0bd73046ac646b5264c1987b20d0601f81ef35d731d languageName: node linkType: hard @@ -12960,7 +13228,7 @@ __metadata: resolution: "string_decoder@npm:1.1.1" dependencies: safe-buffer: "npm:~5.1.0" - checksum: 7c41c17ed4dea105231f6df208002ebddd732e8e9e2d619d133cecd8e0087ddfd9587d2feb3c8caf3213cbd841ada6d057f5142cae68a4e62d3540778d9819b4 + checksum: 10c0/b4f89f3a92fd101b5653ca3c99550e07bdf9e13b35037e9e2a1c7b47cec4e55e06ff3fc468e314a0b5e80bfbaf65c1ca5a84978764884ae9413bec1fc6ca924e languageName: node linkType: hard @@ -12969,44 +13237,44 @@ __metadata: resolution: "strip-ansi@npm:6.0.1" dependencies: ansi-regex: "npm:^5.0.1" - checksum: ae3b5436d34fadeb6096367626ce987057713c566e1e7768818797e00ac5d62023d0f198c4e681eae9e20701721980b26a64a8f5b91238869592a9c6800719a2 + checksum: 10c0/1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952 languageName: node linkType: hard -"strip-ansi@npm:^7.0.1": +"strip-ansi@npm:^7.0.1, strip-ansi@npm:^7.1.0": version: 7.1.0 resolution: "strip-ansi@npm:7.1.0" dependencies: ansi-regex: "npm:^6.0.1" - checksum: 475f53e9c44375d6e72807284024ac5d668ee1d06010740dec0b9744f2ddf47de8d7151f80e5f6190fc8f384e802fdf9504b76a7e9020c9faee7103623338be2 + checksum: 10c0/a198c3762e8832505328cbf9e8c8381de14a4fa50a4f9b2160138158ea88c0f5549fb50cb13c651c3088f47e63a108b34622ec18c0499b6c8c3a5ddf6b305ac4 languageName: node linkType: hard "strip-bom@npm:^3.0.0": version: 3.0.0 resolution: "strip-bom@npm:3.0.0" - checksum: 8d50ff27b7ebe5ecc78f1fe1e00fcdff7af014e73cf724b46fb81ef889eeb1015fc5184b64e81a2efe002180f3ba431bdd77e300da5c6685d702780fbf0c8d5b + checksum: 10c0/51201f50e021ef16672593d7434ca239441b7b760e905d9f33df6e4f3954ff54ec0e0a06f100d028af0982d6f25c35cd5cda2ce34eaebccd0250b8befb90d8f1 languageName: node linkType: hard "strip-bom@npm:^4.0.0": version: 4.0.0 resolution: "strip-bom@npm:4.0.0" 
- checksum: 9dbcfbaf503c57c06af15fe2c8176fb1bf3af5ff65003851a102749f875a6dbe0ab3b30115eccf6e805e9d756830d3e40ec508b62b3f1ddf3761a20ebe29d3f3 + checksum: 10c0/26abad1172d6bc48985ab9a5f96c21e440f6e7e476686de49be813b5a59b3566dccb5c525b831ec54fe348283b47f3ffb8e080bc3f965fde12e84df23f6bb7ef languageName: node linkType: hard "strip-final-newline@npm:^2.0.0": version: 2.0.0 resolution: "strip-final-newline@npm:2.0.0" - checksum: 69412b5e25731e1938184b5d489c32e340605bb611d6140344abc3421b7f3c6f9984b21dff296dfcf056681b82caa3bb4cc996a965ce37bcfad663e92eae9c64 + checksum: 10c0/bddf8ccd47acd85c0e09ad7375409d81653f645fda13227a9d459642277c253d877b68f2e5e4d819fe75733b0e626bac7e954c04f3236f6d196f79c94fa4a96f languageName: node linkType: hard "strip-final-newline@npm:^3.0.0": version: 3.0.0 resolution: "strip-final-newline@npm:3.0.0" - checksum: 23ee263adfa2070cd0f23d1ac14e2ed2f000c9b44229aec9c799f1367ec001478469560abefd00c5c99ee6f0b31c137d53ec6029c53e9f32a93804e18c201050 + checksum: 10c0/a771a17901427bac6293fd416db7577e2bc1c34a19d38351e9d5478c3c415f523f391003b42ed475f27e33a78233035df183525395f731d3bfb8cdcbd4da08ce languageName: node linkType: hard @@ -13015,7 +13283,7 @@ __metadata: resolution: "strip-indent@npm:3.0.0" dependencies: min-indent: "npm:^1.0.0" - checksum: 18f045d57d9d0d90cd16f72b2313d6364fd2cb4bf85b9f593523ad431c8720011a4d5f08b6591c9d580f446e78855c5334a30fb91aa1560f5d9f95ed1b4a0530 + checksum: 10c0/ae0deaf41c8d1001c5d4fbe16cb553865c1863da4fae036683b474fa926af9fc121e155cb3fc57a68262b2ae7d5b8420aa752c97a6428c315d00efe2a3875679 languageName: node linkType: hard @@ -13024,14 +13292,14 @@ __metadata: resolution: "strip-indent@npm:4.0.0" dependencies: min-indent: "npm:^1.0.1" - checksum: 06cbcd93da721c46bc13caeb1c00af93a9b18146a1c95927672d2decab6a25ad83662772417cea9317a2507fb143253ecc23c4415b64f5828cef9b638a744598 + checksum: 10c0/6b1fb4e22056867f5c9e7a6f3f45922d9a2436cac758607d58aeaac0d3b16ec40b1c43317de7900f1b8dd7a4107352fa47fb960f2c23566538c51e8585c8870e languageName: 
node linkType: hard "strip-json-comments@npm:^3.0.1, strip-json-comments@npm:^3.1.1": version: 3.1.1 resolution: "strip-json-comments@npm:3.1.1" - checksum: 492f73e27268f9b1c122733f28ecb0e7e8d8a531a6662efbd08e22cccb3f9475e90a1b82cab06a392f6afae6d2de636f977e231296400d0ec5304ba70f166443 + checksum: 10c0/9681a6257b925a7fa0f285851c0e613cc934a50661fa7bb41ca9cbbff89686bb4a0ee366e6ecedc4daafd01e83eee0720111ab294366fe7c185e935475ebcecd languageName: node linkType: hard @@ -13040,7 +13308,7 @@ __metadata: resolution: "style-to-js@npm:1.1.10" dependencies: style-to-object: "npm:1.0.5" - checksum: d89274efc4b59ba742f93967380e316370c8a5f0ba1d3481712d3b8d5278f3f9a496e69b95464043058b56bd8c673427da03a98b407b2147e042052f3cca001d + checksum: 10c0/bb0603f50be1ac19cb2b1acd1175d9203ca9858112a851aa05ed6af2cf64f4f2dfe765e2fefce5e64f59b5993e7d8411cdb5eae6812c0f2992b0f04cfd52fbc8 languageName: node linkType: hard @@ -13049,7 +13317,7 @@ __metadata: resolution: "style-to-object@npm:1.0.5" dependencies: inline-style-parser: "npm:0.2.2" - checksum: 8bedb6aa2e4e82b675cc414fa3436017fbfbf689f9ce3efc76bfc9d75fbe105bea08afc2f9cca1beee73f016e4847712789847efd888ae2cce915af74085e76b + checksum: 10c0/39bbc5e9f82a80d6a84c134bf49ba50402bf90304af4281fdd317c9792436c166b2f3a2a3d9a65e3f2a3360b35fe4e352932ec9a51513b9864bfd80b7f5a82e1 languageName: node linkType: hard @@ -13058,7 +13326,7 @@ __metadata: resolution: "supports-color@npm:5.5.0" dependencies: has-flag: "npm:^3.0.0" - checksum: 5f505c6fa3c6e05873b43af096ddeb22159831597649881aeb8572d6fe3b81e798cc10840d0c9735e0026b250368851b7f77b65e84f4e4daa820a4f69947f55b + checksum: 10c0/6ae5ff319bfbb021f8a86da8ea1f8db52fac8bd4d499492e30ec17095b58af11f0c55f8577390a749b1c4dde691b6a0315dab78f5f54c9b3d83f8fb5905c1c05 languageName: node linkType: hard @@ -13067,7 +13335,7 @@ __metadata: resolution: "supports-color@npm:7.2.0" dependencies: has-flag: "npm:^4.0.0" - checksum: 
c8bb7afd564e3b26b50ca6ee47572c217526a1389fe018d00345856d4a9b08ffbd61fadaf283a87368d94c3dcdb8f5ffe2650a5a65863e21ad2730ca0f05210a + checksum: 10c0/afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124 languageName: node linkType: hard @@ -13076,21 +13344,21 @@ __metadata: resolution: "supports-color@npm:8.1.1" dependencies: has-flag: "npm:^4.0.0" - checksum: 157b534df88e39c5518c5e78c35580c1eca848d7dbaf31bbe06cdfc048e22c7ff1a9d046ae17b25691128f631a51d9ec373c1b740c12ae4f0de6e292037e4282 + checksum: 10c0/ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89 languageName: node linkType: hard "supports-preserve-symlinks-flag@npm:^1.0.0": version: 1.0.0 resolution: "supports-preserve-symlinks-flag@npm:1.0.0" - checksum: a9dc19ae2220c952bd2231d08ddeecb1b0328b61e72071ff4000c8384e145cc07c1c0bdb3b5a1cb06e186a7b2790f1dee793418b332f6ddf320de25d9125be7e + checksum: 10c0/6c4032340701a9950865f7ae8ef38578d8d7053f5e10518076e6554a9381fa91bd9c6850193695c141f32b21f979c985db07265a758867bac95de05f7d8aeb39 languageName: node linkType: hard "svg-parser@npm:^2.0.4": version: 2.0.4 resolution: "svg-parser@npm:2.0.4" - checksum: ec196da6ea21481868ab26911970e35488361c39ead1c6cdd977ba16c885c21a91ddcbfd113bfb01f79a822e2a751ef85b2f7f95e2cb9245558ebce12c34af1f + checksum: 10c0/02f6cb155dd7b63ebc2f44f36365bc294543bebb81b614b7628f1af3c54ab64f7e1cec20f06e252bf95bdde78441ae295a412c68ad1678f16a6907d924512b7a languageName: node linkType: hard @@ -13117,7 +13385,7 @@ __metadata: yargs: "npm:^17.7.2" bin: svg-sprite: bin/svg-sprite.js - checksum: ddcffb23af23d6ddc37a0f5e7e57838c578f37efd8ddb377112a8abba1c5ebc17e24cffc1279720509895cef79db455e2dfd1439ea0f42fc45df38329000faa3 + checksum: 10c0/486118448e1627a36da0da82b0ab1e329b6bdda1e666b761d28f633e054735bcc56ff414a521e84e19cf1ad502de8f24689b43a56c49bc52aa0a3ad884233b88 languageName: node linkType: hard @@ 
-13134,21 +13402,21 @@ __metadata: stable: "npm:^0.1.8" bin: svgo: bin/svgo - checksum: 2b74544da1a9521852fe2784252d6083b336e32528d0e424ee54d1613f17312edc7020c29fa399086560e96cba42ede4a2205328a08edeefa26de84cd769a64a + checksum: 10c0/0741f5d5cad63111a90a0ce7a1a5a9013f6d293e871b75efe39addb57f29a263e45294e485a4d2ff9cc260a5d142c8b5937b2234b4ef05efdd2706fb2d360ecc languageName: node linkType: hard "symbol-tree@npm:^3.2.4": version: 3.2.4 resolution: "symbol-tree@npm:3.2.4" - checksum: c09a00aadf279d47d0c5c46ca3b6b2fbaeb45f0a184976d599637d412d3a70bbdc043ff33effe1206dea0e36e0ad226cb957112e7ce9a4bf2daedf7fa4f85c53 + checksum: 10c0/dfbe201ae09ac6053d163578778c53aa860a784147ecf95705de0cd23f42c851e1be7889241495e95c37cabb058edb1052f141387bef68f705afc8f9dd358509 languageName: node linkType: hard "synchronous-promise@npm:^2.0.15": version: 2.0.17 resolution: "synchronous-promise@npm:2.0.17" - checksum: dd74b1c05caab8ea34e26c8b52a0966efd70b0229ad39447ce066501dd6931d4d97a3f88b0f306880a699660cd334180a24d9738b385aed0bd0104a5be207ec1 + checksum: 10c0/1babe643d8417789ef6e5a2f3d4b8abcda2de236acd09bbe2c98f6be82c0a2c92ed21a6e4f934845fa8de18b1435a9cba1e8c3d945032e8a532f076224c024b1 languageName: node linkType: hard @@ -13158,14 +13426,14 @@ __metadata: dependencies: "@pkgr/core": "npm:^0.1.0" tslib: "npm:^2.6.2" - checksum: 2864a5c3e689ad5b991bebbd8a583c5682c4fa08a4f39986b510b6b5d160c08fc3672444069f8f96ed6a9d12772879c674c1f61e728573eadfa90af40a765b74 + checksum: 10c0/c3d3aa8e284f3f84f2f868b960c9f49239b364e35f6d20825a448449a3e9c8f49fe36cdd5196b30615682f007830d46f2ea354003954c7336723cb821e4b6519 languageName: node linkType: hard "tabbable@npm:^6.0.1": version: 6.2.0 resolution: "tabbable@npm:6.2.0" - checksum: 980fa73476026e99dcacfc0d6e000d41d42c8e670faf4682496d30c625495e412c4369694f2a15cf1e5252d22de3c396f2b62edbe8d60b5dadc40d09e3f2dde3 + checksum: 10c0/ced8b38f05f2de62cd46836d77c2646c42b8c9713f5bd265daf0e78ff5ac73d3ba48a7ca45f348bafeef29b23da7187c72250742d37627883ef89cbd7fa76898 
languageName: node linkType: hard @@ -13177,7 +13445,7 @@ __metadata: mkdirp-classic: "npm:^0.5.2" pump: "npm:^3.0.0" tar-stream: "npm:^2.1.4" - checksum: 526deae025453e825f87650808969662fbb12eb0461d033e9b447de60ec951c6c4607d0afe7ce057defe9d4e45cf80399dd74bc15f9d9e0773d5e990a78ce4ac + checksum: 10c0/871d26a934bfb7beeae4c4d8a09689f530b565f79bd0cf489823ff0efa3705da01278160da10bb006d1a793fa0425cf316cec029b32a9159eacbeaff4965fb6d languageName: node linkType: hard @@ -13190,7 +13458,7 @@ __metadata: fs-constants: "npm:^1.0.0" inherits: "npm:^2.0.3" readable-stream: "npm:^3.1.1" - checksum: 1a52a51d240c118cbcd30f7368ea5e5baef1eac3e6b793fb1a41e6cd7319296c79c0264ccc5859f5294aa80f8f00b9239d519e627b9aade80038de6f966fec6a + checksum: 10c0/2f4c910b3ee7196502e1ff015a7ba321ec6ea837667220d7bcb8d0852d51cb04b87f7ae471008a6fb8f5b1a1b5078f62f3a82d30c706f20ada1238ac797e7692 languageName: node linkType: hard @@ -13204,7 +13472,7 @@ __metadata: minizlib: "npm:^2.1.1" mkdirp: "npm:^1.0.3" yallist: "npm:^4.0.0" - checksum: 2042bbb14830b5cd0d584007db0eb0a7e933e66d1397e72a4293768d2332449bc3e312c266a0887ec20156dea388d8965e53b4fc5097f42d78593549016da089 + checksum: 10c0/02ca064a1a6b4521fef88c07d389ac0936730091f8c02d30ea60d472e0378768e870769ab9e986d87807bfee5654359cf29ff4372746cc65e30cbddc352660d8 languageName: node linkType: hard @@ -13213,14 +13481,14 @@ __metadata: resolution: "telejson@npm:7.2.0" dependencies: memoizerific: "npm:^1.11.3" - checksum: 6e89b3d3c45b5a2aced9132f6a968fcdf758c00be4c3acb115d7d81e95c9e04083a7a4a9b43057fcf48b101156c1607a38f5491615956acb28d4d1f78a4bda20 + checksum: 10c0/d26e6cc93e54bfdcdb207b49905508c5db45862e811a2e2193a735409e47b14530e1c19351618a3e03ad2fd4ffc3759364fcd72851aba2df0300fab574b6151c languageName: node linkType: hard "temp-dir@npm:^2.0.0": version: 2.0.0 resolution: "temp-dir@npm:2.0.0" - checksum: cc4f0404bf8d6ae1a166e0e64f3f409b423f4d1274d8c02814a59a5529f07db6cd070a749664141b992b2c1af337fa9bb451a460a43bb9bcddc49f235d3115aa + checksum: 
10c0/b1df969e3f3f7903f3426861887ed76ba3b495f63f6d0c8e1ce22588679d9384d336df6064210fda14e640ed422e2a17d5c40d901f60e161c99482d723f4d309 languageName: node linkType: hard @@ -13229,7 +13497,7 @@ __metadata: resolution: "temp@npm:0.8.4" dependencies: rimraf: "npm:~2.6.2" - checksum: 0a7f76b49637415bc391c3f6e69377cc4c38afac95132b4158fa711e77b70b082fe56fd886f9d11ffab9d148df181a105a93c8b618fb72266eeaa5e5ddbfe37f + checksum: 10c0/7f071c963031bfece37e13c5da11e9bb451e4ddfc4653e23e327a2f91594102dc826ef6a693648e09a6e0eb856f507967ec759ae55635e0878091eccf411db37 languageName: node linkType: hard @@ -13242,7 +13510,7 @@ __metadata: temp-dir: "npm:^2.0.0" type-fest: "npm:^0.16.0" unique-string: "npm:^2.0.0" - checksum: e3a3857cd102db84c484b8e878203b496f0e927025b7c60dd118c0c9a0962f4589321c6b3093185d529576af5c58be65d755e72c2a6ad009ff340ab8cbbe4d33 + checksum: 10c0/864a1cf1b5536dc21e84ae45dbbc3ba4dd2c7ec1674d895f99c349cf209df959a53d797ca38d0b2cf69c7684d565fde5cfc67faaa63b7208ffb21d454b957472 languageName: node linkType: hard @@ -13253,21 +13521,21 @@ __metadata: "@istanbuljs/schema": "npm:^0.1.2" glob: "npm:^7.1.4" minimatch: "npm:^3.0.4" - checksum: 8fccb2cb6c8fcb6bb4115394feb833f8b6cf4b9503ec2485c2c90febf435cac62abe882a0c5c51a37b9bbe70640cdd05acf5f45e486ac4583389f4b0855f69e5 + checksum: 10c0/019d33d81adff3f9f1bfcff18125fb2d3c65564f437d9be539270ee74b994986abb8260c7c2ce90e8f30162178b09dbbce33c6389273afac4f36069c48521f57 languageName: node linkType: hard "text-hex@npm:1.0.x": version: 1.0.0 resolution: "text-hex@npm:1.0.0" - checksum: 1138f68adc97bf4381a302a24e2352f04992b7b1316c5003767e9b0d3367ffd0dc73d65001ea02b07cd0ecc2a9d186de0cf02f3c2d880b8a522d4ccb9342244a + checksum: 10c0/57d8d320d92c79d7c03ffb8339b825bb9637c2cbccf14304309f51d8950015c44464b6fd1b6820a3d4821241c68825634f09f5a2d9d501e84f7c6fd14376860d languageName: node linkType: hard "text-table@npm:^0.2.0": version: 0.2.0 resolution: "text-table@npm:0.2.0" - checksum: 
4383b5baaeffa9bb4cda2ac33a4aa2e6d1f8aaf811848bf73513a9b88fd76372dc461f6fd6d2e9cb5100f48b473be32c6f95bd983509b7d92bb4d92c10747452 + checksum: 10c0/02805740c12851ea5982686810702e2f14369a5f4c5c40a836821e3eefc65ffeec3131ba324692a37608294b0fd8c1e55a2dd571ffed4909822787668ddbee5c languageName: node linkType: hard @@ -13277,35 +13545,35 @@ __metadata: dependencies: readable-stream: "npm:~2.3.6" xtend: "npm:~4.0.1" - checksum: cd71f7dcdc7a8204fea003a14a433ef99384b7d4e31f5497e1f9f622b3cf3be3691f908455f98723bdc80922a53af7fa10c3b7abbe51c6fd3d536dbc7850e2c4 + checksum: 10c0/cbfe5b57943fa12b4f8c043658c2a00476216d79c014895cef1ac7a1d9a8b31f6b438d0e53eecbb81054b93128324a82ecd59ec1a4f91f01f7ac113dcb14eade languageName: node linkType: hard "tiny-invariant@npm:^1.3.1": version: 1.3.3 resolution: "tiny-invariant@npm:1.3.3" - checksum: 5e185c8cc2266967984ce3b352a4e57cb89dad5a8abb0dea21468a6ecaa67cd5bb47a3b7a85d08041008644af4f667fb8b6575ba38ba5fb00b3b5068306e59fe + checksum: 10c0/65af4a07324b591a059b35269cd696aba21bef2107f29b9f5894d83cc143159a204b299553435b03874ebb5b94d019afa8b8eff241c8a4cfee95872c2e1c1c4a languageName: node linkType: hard "tiny-warning@npm:^1.0.3": version: 1.0.3 resolution: "tiny-warning@npm:1.0.3" - checksum: da62c4acac565902f0624b123eed6dd3509bc9a8d30c06e017104bedcf5d35810da8ff72864400ad19c5c7806fc0a8323c68baf3e326af7cb7d969f846100d71 + checksum: 10c0/ef8531f581b30342f29670cb41ca248001c6fd7975ce22122bd59b8d62b4fc84ad4207ee7faa95cde982fa3357cd8f4be650142abc22805538c3b1392d7084fa languageName: node linkType: hard "tmpl@npm:1.0.5": version: 1.0.5 resolution: "tmpl@npm:1.0.5" - checksum: cd922d9b853c00fe414c5a774817be65b058d54a2d01ebb415840960406c669a0fc632f66df885e24cb022ec812739199ccbdb8d1164c3e513f85bfca5ab2873 + checksum: 10c0/f935537799c2d1922cb5d6d3805f594388f75338fe7a4a9dac41504dd539704ca4db45b883b52e7b0aa5b2fd5ddadb1452bf95cd23a69da2f793a843f9451cc9 languageName: node linkType: hard "to-fast-properties@npm:^2.0.0": version: 2.0.0 resolution: 
"to-fast-properties@npm:2.0.0" - checksum: be2de62fe58ead94e3e592680052683b1ec986c72d589e7b21e5697f8744cdbf48c266fa72f6c15932894c10187b5f54573a3bcf7da0bfd964d5caf23d436168 + checksum: 10c0/b214d21dbfb4bce3452b6244b336806ffea9c05297148d32ebb428d5c43ce7545bdfc65a1ceb58c9ef4376a65c0cb2854d645f33961658b3e3b4f84910ddcdd7 languageName: node linkType: hard @@ -13314,28 +13582,28 @@ __metadata: resolution: "to-regex-range@npm:5.0.1" dependencies: is-number: "npm:^7.0.0" - checksum: 10dda13571e1f5ad37546827e9b6d4252d2e0bc176c24a101252153ef435d83696e2557fe128c4678e4e78f5f01e83711c703eef9814eb12dab028580d45980a + checksum: 10c0/487988b0a19c654ff3e1961b87f471702e708fa8a8dd02a298ef16da7206692e8552a0250e8b3e8759270f62e9d8314616f6da274734d3b558b1fc7b7724e892 languageName: node linkType: hard "tocbot@npm:^4.20.1": version: 4.25.0 resolution: "tocbot@npm:4.25.0" - checksum: fcbe6299ec26322f51e62d54d1281b31370efab89b7a7e58c90fa431a51548e1a09b8aafd7314ed2500694bee8451713f59ecddafa7242e6bf626134b0e3cce6 + checksum: 10c0/b6ca2ca5a3549e7c4007843e0b41cb113656006a59f5aa2328bcccdc8b780f25b642c1d0d64e02255c97a3ba5072ae5085a403cd2b560fb3ee30bd59924fe273 languageName: node linkType: hard "toggle-selection@npm:^1.0.6": version: 1.0.6 resolution: "toggle-selection@npm:1.0.6" - checksum: 9a0ed0ecbaac72b4944888dacd79fe0a55eeea76120a4c7e46b3bb3d85b24f086e90560bb22f5a965654a25ab43d79ec47dfdb3f1850ba740b14c5a50abc7040 + checksum: 10c0/f2cf1f2c70f374fd87b0cdc8007453ba9e981c4305a8bf4eac10a30e62ecdfd28bca7d18f8f15b15a506bf8a7bfb20dbe3539f0fcf2a2c8396c1a78d53e1f179 languageName: node linkType: hard "toidentifier@npm:1.0.1": version: 1.0.1 resolution: "toidentifier@npm:1.0.1" - checksum: 952c29e2a85d7123239b5cfdd889a0dde47ab0497f0913d70588f19c53f7e0b5327c95f4651e413c74b785147f9637b17410ac8c846d5d4a20a5a33eb6dc3a45 + checksum: 10c0/93937279934bd66cc3270016dd8d0afec14fb7c94a05c72dc57321f8bd1fa97e5bea6d1f7c89e728d077ca31ea125b78320a616a6c6cd0e6b9cb94cb864381c1 languageName: node linkType: hard @@ -13347,7 
+13615,7 @@ __metadata: punycode: "npm:^2.1.1" universalify: "npm:^0.2.0" url-parse: "npm:^1.5.3" - checksum: cf148c359b638a7069fc3ba9a5257bdc9616a6948a98736b92c3570b3f8401cf9237a42bf716878b656f372a1fb65b74dd13a46ccff8eceba14ffd053d33f72a + checksum: 10c0/4fc0433a0cba370d57c4b240f30440c848906dee3180bb6e85033143c2726d322e7e4614abb51d42d111ebec119c4876ed8d7247d4113563033eebbc1739c831 languageName: node linkType: hard @@ -13356,21 +13624,21 @@ __metadata: resolution: "tr46@npm:3.0.0" dependencies: punycode: "npm:^2.1.1" - checksum: b09a15886cbfaee419a3469081223489051ce9dca3374dd9500d2378adedbee84a3c73f83bfdd6bb13d53657753fc0d4e20a46bfcd3f1b9057ef528426ad7ce4 + checksum: 10c0/cdc47cad3a9d0b6cb293e39ccb1066695ae6fdd39b9e4f351b010835a1f8b4f3a6dc3a55e896b421371187f22b48d7dac1b693de4f6551bdef7b6ab6735dfe3b languageName: node linkType: hard "tr46@npm:~0.0.3": version: 0.0.3 resolution: "tr46@npm:0.0.3" - checksum: 8f1f5aa6cb232f9e1bdc86f485f916b7aa38caee8a778b378ffec0b70d9307873f253f5cbadbe2955ece2ac5c83d0dc14a77513166ccd0a0c7fe197e21396695 + checksum: 10c0/047cb209a6b60c742f05c9d3ace8fa510bff609995c129a37ace03476a9b12db4dbf975e74600830ef0796e18882b2381fb5fb1f6b4f96b832c374de3ab91a11 languageName: node linkType: hard "triple-beam@npm:^1.3.0": version: 1.4.1 resolution: "triple-beam@npm:1.4.1" - checksum: 2e881a3e8e076b6f2b85b9ec9dd4a900d3f5016e6d21183ed98e78f9abcc0149e7d54d79a3f432b23afde46b0885bdcdcbff789f39bc75de796316961ec07f61 + checksum: 10c0/4bf1db71e14fe3ff1c3adbe3c302f1fdb553b74d7591a37323a7badb32dc8e9c290738996cbb64f8b10dc5a3833645b5d8c26221aaaaa12e50d1251c9aba2fea languageName: node linkType: hard @@ -13379,14 +13647,14 @@ __metadata: resolution: "ts-api-utils@npm:1.2.1" peerDependencies: typescript: ">=4.2.0" - checksum: 6d7f60fd01e3885bb334607f22b9cb1002e72da81dad2e672fef1b0d1a2f640b0f0ff5310369401488fac90c7a7f5d39c89fd18789af59c672c9b5aef4cade3e + checksum: 
10c0/8ddb493e7ae581d3f57a2e469142feb60b420d4ad8366ab969fe8e36531f8f301f370676b47e8d97f28b5f5fd10d6f2d55f656943a8546ef95e35ce5cf117754 languageName: node linkType: hard "ts-dedent@npm:^2.0.0, ts-dedent@npm:^2.2.0": version: 2.2.0 resolution: "ts-dedent@npm:2.2.0" - checksum: 93ed8f7878b6d5ed3c08d99b740010eede6bccfe64bce61c5a4da06a2c17d6ddbb80a8c49c2d15251de7594a4f93ffa21dd10e7be75ef66a4dc9951b4a94e2af + checksum: 10c0/175adea838468cc2ff7d5e97f970dcb798bbcb623f29c6088cb21aa2880d207c5784be81ab1741f56b9ac37840cbaba0c0d79f7f8b67ffe61c02634cafa5c303 languageName: node linkType: hard @@ -13400,7 +13668,7 @@ __metadata: optional: true bin: tsconfck: bin/tsconfck.js - checksum: 61df3b03b334a25eabb0a52e67a0c8d85770c631f2739db7703af8fdd102a2ebd598f1c851cc5fc6d6a59f2497a26c845be71c934ea16d838a3ff95a885034fb + checksum: 10c0/6efc9cbbccdbbcafc86a744a1804fcd8438097c2beaac370444cc413fa1582a019a74002a111e3005b89ca0b0169ace730161864628fc751754e29b335c3c79f languageName: node linkType: hard @@ -13412,21 +13680,21 @@ __metadata: json5: "npm:^1.0.2" minimist: "npm:^1.2.6" strip-bom: "npm:^3.0.0" - checksum: 2041beaedc6c271fc3bedd12e0da0cc553e65d030d4ff26044b771fac5752d0460944c0b5e680f670c2868c95c664a256cec960ae528888db6ded83524e33a14 + checksum: 10c0/5b4f301a2b7a3766a986baf8fc0e177eb80bdba6e396792ff92dc23b5bca8bb279fc96517dcaaef63a3b49bebc6c4c833653ec58155780bc906bdbcf7dda0ef5 languageName: node linkType: hard "tslib@npm:^1.13.0, tslib@npm:^1.8.1": version: 1.14.1 resolution: "tslib@npm:1.14.1" - checksum: 7dbf34e6f55c6492637adb81b555af5e3b4f9cc6b998fb440dac82d3b42bdc91560a35a5fb75e20e24a076c651438234da6743d139e4feabf0783f3cdfe1dddb + checksum: 10c0/69ae09c49eea644bc5ebe1bca4fa4cc2c82b7b3e02f43b84bd891504edf66dbc6b2ec0eef31a957042de2269139e4acff911e6d186a258fb14069cd7f6febce2 languageName: node linkType: hard "tslib@npm:^2.0.0, tslib@npm:^2.0.1, tslib@npm:^2.1.0, tslib@npm:^2.4.0, tslib@npm:^2.6.2": version: 2.6.2 resolution: "tslib@npm:2.6.2" - checksum: 
bd26c22d36736513980091a1e356378e8b662ded04204453d353a7f34a4c21ed0afc59b5f90719d4ba756e581a162ecbf93118dc9c6be5acf70aa309188166ca + checksum: 10c0/e03a8a4271152c8b26604ed45535954c0a45296e32445b4b87f8a5abdb2421f40b59b4ca437c4346af0f28179780d604094eb64546bee2019d903d01c6c19bdb languageName: node linkType: hard @@ -13437,7 +13705,7 @@ __metadata: tslib: "npm:^1.8.1" peerDependencies: typescript: ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" - checksum: ea036bec1dd024e309939ffd49fda7a351c0e87a1b8eb049570dd119d447250e2c56e0e6c00554e8205760e7417793fdebff752a46e573fbe07d4f375502a5b2 + checksum: 10c0/02f19e458ec78ead8fffbf711f834ad8ecd2cc6ade4ec0320790713dccc0a412b99e7fd907c4cda2a1dc602c75db6f12e0108e87a5afad4b2f9e90a24cabd5a2 languageName: node linkType: hard @@ -13446,56 +13714,56 @@ __metadata: resolution: "type-check@npm:0.4.0" dependencies: prelude-ls: "npm:^1.2.1" - checksum: 14687776479d048e3c1dbfe58a2409e00367810d6960c0f619b33793271ff2a27f81b52461f14a162f1f89a9b1d8da1b237fc7c99b0e1fdcec28ec63a86b1fec + checksum: 10c0/7b3fd0ed43891e2080bf0c5c504b418fbb3e5c7b9708d3d015037ba2e6323a28152ec163bcb65212741fa5d2022e3075ac3c76440dbd344c9035f818e8ecee58 languageName: node linkType: hard "type-detect@npm:4.0.8": version: 4.0.8 resolution: "type-detect@npm:4.0.8" - checksum: 5179e3b8ebc51fce1b13efb75fdea4595484433f9683bbc2dca6d99789dba4e602ab7922d2656f2ce8383987467f7770131d4a7f06a26287db0615d2f4c4ce7d + checksum: 10c0/8fb9a51d3f365a7de84ab7f73b653534b61b622aa6800aecdb0f1095a4a646d3f5eb295322127b6573db7982afcd40ab492d038cf825a42093a58b1e1353e0bd languageName: node linkType: hard "type-fest@npm:^0.16.0": version: 0.16.0 resolution: "type-fest@npm:0.16.0" - checksum: fd8c47ccb90e9fe7bae8bfc0e116e200e096120200c1ab1737bf0bc9334b344dd4925f876ed698174ffd58cd179bb56a55467be96aedc22d5d72748eac428bc8 + checksum: 
10c0/6b4d846534e7bcb49a6160b068ffaed2b62570d989d909ac3f29df5ef1e993859f890a4242eebe023c9e923f96adbcb3b3e88a198c35a1ee9a731e147a6839c3 languageName: node linkType: hard "type-fest@npm:^0.20.2": version: 0.20.2 resolution: "type-fest@npm:0.20.2" - checksum: 8907e16284b2d6cfa4f4817e93520121941baba36b39219ea36acfe64c86b9dbc10c9941af450bd60832c8f43464974d51c0957f9858bc66b952b66b6914cbb9 + checksum: 10c0/dea9df45ea1f0aaa4e2d3bed3f9a0bfe9e5b2592bddb92eb1bf06e50bcf98dbb78189668cd8bc31a0511d3fc25539b4cd5c704497e53e93e2d40ca764b10bfc3 languageName: node linkType: hard "type-fest@npm:^0.21.3": version: 0.21.3 resolution: "type-fest@npm:0.21.3" - checksum: f4254070d9c3d83a6e573bcb95173008d73474ceadbbf620dd32d273940ca18734dff39c2b2480282df9afe5d1675ebed5499a00d791758748ea81f61a38961f + checksum: 10c0/902bd57bfa30d51d4779b641c2bc403cdf1371fb9c91d3c058b0133694fcfdb817aef07a47f40faf79039eecbaa39ee9d3c532deff244f3a19ce68cea71a61e8 languageName: node linkType: hard "type-fest@npm:^0.6.0": version: 0.6.0 resolution: "type-fest@npm:0.6.0" - checksum: 9ecbf4ba279402b14c1a0614b6761bbe95626fab11377291fecd7e32b196109551e0350dcec6af74d97ced1b000ba8060a23eca33157091e642b409c2054ba82 + checksum: 10c0/0c585c26416fce9ecb5691873a1301b5aff54673c7999b6f925691ed01f5b9232db408cdbb0bd003d19f5ae284322523f44092d1f81ca0a48f11f7cf0be8cd38 languageName: node linkType: hard "type-fest@npm:^0.8.1": version: 0.8.1 resolution: "type-fest@npm:0.8.1" - checksum: fd4a91bfb706aeeb0d326ebd2e9a8ea5263979e5dec8d16c3e469a5bd3a946e014a062ef76c02e3086d3d1c7209a56a20a4caafd0e9f9a5c2ab975084ea3d388 + checksum: 10c0/dffbb99329da2aa840f506d376c863bd55f5636f4741ad6e65e82f5ce47e6914108f44f340a0b74009b0cb5d09d6752ae83203e53e98b1192cf80ecee5651636 languageName: node linkType: hard "type-fest@npm:^2.19.0, type-fest@npm:~2.19": version: 2.19.0 resolution: "type-fest@npm:2.19.0" - checksum: 7bf9e8fdf34f92c8bb364c0af14ca875fac7e0183f2985498b77be129dc1b3b1ad0a6b3281580f19e48c6105c037fb966ad9934520c69c6434d17fd0af4eed78 + checksum: 
10c0/a5a7ecf2e654251613218c215c7493574594951c08e52ab9881c9df6a6da0aeca7528c213c622bc374b4e0cb5c443aa3ab758da4e3c959783ce884c3194e12cb languageName: node linkType: hard @@ -13505,7 +13773,7 @@ __metadata: dependencies: media-typer: "npm:0.3.0" mime-types: "npm:~2.1.24" - checksum: 0bd9eeae5efd27d98fd63519f999908c009e148039d8e7179a074f105362d4fcc214c38b24f6cda79c87e563cbd12083a4691381ed28559220d4a10c2047bed4 + checksum: 10c0/a23daeb538591b7efbd61ecf06b6feb2501b683ffdc9a19c74ef5baba362b4347e42f1b4ed81f5882a8c96a3bfff7f93ce3ffaf0cbbc879b532b04c97a55db9d languageName: node linkType: hard @@ -13516,7 +13784,7 @@ __metadata: call-bind: "npm:^1.0.7" es-errors: "npm:^1.3.0" is-typed-array: "npm:^1.1.13" - checksum: 02ffc185d29c6df07968272b15d5319a1610817916ec8d4cd670ded5d1efe72901541ff2202fcc622730d8a549c76e198a2f74e312eabbfb712ed907d45cbb0b + checksum: 10c0/9e043eb38e1b4df4ddf9dde1aa64919ae8bb909571c1cc4490ba777d55d23a0c74c7d73afcdd29ec98616d91bb3ae0f705fad4421ea147e1daf9528200b562da languageName: node linkType: hard @@ -13529,7 +13797,7 @@ __metadata: gopd: "npm:^1.0.1" has-proto: "npm:^1.0.3" is-typed-array: "npm:^1.1.13" - checksum: e4a38329736fe6a73b52a09222d4a9e8de14caaa4ff6ad8e55217f6705b017d9815b7284c85065b3b8a7704e226ccff1372a72b78c2a5b6b71b7bf662308c903 + checksum: 10c0/fcebeffb2436c9f355e91bd19e2368273b88c11d1acc0948a2a306792f1ab672bce4cfe524ab9f51a0505c9d7cd1c98eff4235c4f6bfef6a198f6cfc4ff3d4f3 languageName: node linkType: hard @@ -13543,7 +13811,7 @@ __metadata: gopd: "npm:^1.0.1" has-proto: "npm:^1.0.3" is-typed-array: "npm:^1.1.13" - checksum: ac26d720ebb2aacbc45e231347c359e6649f52e0cfe0e76e62005912f8030d68e4cb7b725b1754e8fdd48e433cb68df5a8620a3e420ad1457d666e8b29bf9150 + checksum: 10c0/d2628bc739732072e39269389a758025f75339de2ed40c4f91357023c5512d237f255b633e3106c461ced41907c1bf9a533c7e8578066b0163690ca8bc61b22f languageName: node linkType: hard @@ -13557,14 +13825,14 @@ __metadata: has-proto: "npm:^1.0.3" is-typed-array: "npm:^1.1.13" 
possible-typed-array-names: "npm:^1.0.0" - checksum: f9a0da99c41880b44e2c5e5d0d01515c2a6e0f54b10c594151804f013272d837df3b67ea84d7304ecfbab2c10d99c3372168bf3a4bd295abf13ac5a72f93054a + checksum: 10c0/5cc0f79196e70a92f8f40846cfa62b3de6be51e83f73655e137116cf65e3c29a288502b18cc8faf33c943c2470a4569009e1d6da338441649a2db2f135761ad5 languageName: node linkType: hard "typedarray@npm:^0.0.6": version: 0.0.6 resolution: "typedarray@npm:0.0.6" - checksum: 2cc1bcf7d8c1237f6a16c04efc06637b2c5f2d74e58e84665445cf87668b85a21ab18dd751fa49eee6ae024b70326635d7b79ad37b1c370ed2fec6aeeeb52714 + checksum: 10c0/6005cb31df50eef8b1f3c780eb71a17925f3038a100d82f9406ac2ad1de5eb59f8e6decbdc145b3a1f8e5836e17b0c0002fb698b9fe2516b8f9f9ff602d36412 languageName: node linkType: hard @@ -13574,7 +13842,7 @@ __metadata: bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: d65e50eb849bd21ff8677e5b9447f9c6e74777e346afd67754934264dcbf4bd59e7d2473f6062d9a015d66bd573311166357e3eb07fea0b52859cf9bb2b58555 + checksum: 10c0/91ae3e6193d0ddb8656d4c418a033f0f75dec5e077ebbc2bd6d76439b93f35683936ee1bdc0e9cf94ec76863aa49f27159b5788219b50e1cd0cd6d110aa34b07 languageName: node linkType: hard @@ -13584,14 +13852,14 @@ __metadata: bin: tsc: bin/tsc tsserver: bin/tsserver - checksum: f79cc2ba802c94c2b78dbb00d767a10adb67368ae764709737dc277273ec148aa4558033a03ce901406b35fddf4eac46dabc94a1e1d12d2587e2b9cfe5707b4a + checksum: 10c0/062c1cee1990e6b9419ce8a55162b8dc917eb87f807e4de0327dbc1c2fa4e5f61bc0dd4e034d38ff541d1ed0479b53bcee8e4de3a4075c51a1724eb6216cb6f5 languageName: node linkType: hard "ufo@npm:^1.3.2": version: 1.4.0 resolution: "ufo@npm:1.4.0" - checksum: b7aea8503878dc5ad797d8fc6fe39fec64d9cc7e89fb147ef86ec676e37bb462d99d67c6aad20b15f7d3e6d275d66666b29214422e268f1d98f6eaf707a207a6 + checksum: 10c0/d9a3cb8c5fd13356e0af661362244fd0a901edcdd08996f42553271007cae01e85dcec29a3303a87ddab6aa705cbd630332aaa8c268d037483536b198fa67a7c languageName: node linkType: hard @@ -13600,7 +13868,7 @@ __metadata: resolution: 
"uglify-js@npm:3.17.4" bin: uglifyjs: bin/uglifyjs - checksum: 4c0b800e0ff192079d2c3ce8414fd3b656a570028c7c79af5c29c53d5c532b68bbcae4ad47307f89c2ee124d11826fff7a136b59d5c5bb18422bcdf5568afe1e + checksum: 10c0/8b7fcdca69deb284fed7d2025b73eb747ce37f9aca6af53422844f46427152d5440601b6e2a033e77856a2f0591e4167153d5a21b68674ad11f662034ec13ced languageName: node linkType: hard @@ -13612,21 +13880,21 @@ __metadata: has-bigints: "npm:^1.0.2" has-symbols: "npm:^1.0.3" which-boxed-primitive: "npm:^1.0.2" - checksum: 06e1ee41c1095e37281cb71a975cb3350f7cb470a0665d2576f02cc9564f623bd90cfc0183693b8a7fdf2d242963dcc3010b509fa3ac683f540c765c0f3e7e43 + checksum: 10c0/81ca2e81134167cc8f75fa79fbcc8a94379d6c61de67090986a2273850989dd3bae8440c163121b77434b68263e34787a675cbdcb34bb2f764c6b9c843a11b66 languageName: node linkType: hard "undici-types@npm:~5.26.4": version: 5.26.5 resolution: "undici-types@npm:5.26.5" - checksum: 0097779d94bc0fd26f0418b3a05472410408877279141ded2bd449167be1aed7ea5b76f756562cb3586a07f251b90799bab22d9019ceba49c037c76445f7cddd + checksum: 10c0/bb673d7876c2d411b6eb6c560e0c571eef4a01c1c19925175d16e3a30c4c428181fb8d7ae802a261f283e4166a0ac435e2f505743aa9e45d893f9a3df017b501 languageName: node linkType: hard "unicode-canonical-property-names-ecmascript@npm:^2.0.0": version: 2.0.0 resolution: "unicode-canonical-property-names-ecmascript@npm:2.0.0" - checksum: 39be078afd014c14dcd957a7a46a60061bc37c4508ba146517f85f60361acf4c7539552645ece25de840e17e293baa5556268d091ca6762747fdd0c705001a45 + checksum: 10c0/0fe812641bcfa3ae433025178a64afb5d9afebc21a922dafa7cba971deebb5e4a37350423890750132a85c936c290fb988146d0b1bd86838ad4897f4fc5bd0de languageName: node linkType: hard @@ -13636,21 +13904,21 @@ __metadata: dependencies: unicode-canonical-property-names-ecmascript: "npm:^2.0.0" unicode-property-aliases-ecmascript: "npm:^2.0.0" - checksum: 1f34a7434a23df4885b5890ac36c5b2161a809887000be560f56ad4b11126d433c0c1c39baf1016bdabed4ec54829a6190ee37aa24919aa116dc1a5a8a62965a + checksum: 
10c0/4d05252cecaf5c8e36d78dc5332e03b334c6242faf7cf16b3658525441386c0a03b5f603d42cbec0f09bb63b9fd25c9b3b09667aee75463cac3efadae2cd17ec languageName: node linkType: hard "unicode-match-property-value-ecmascript@npm:^2.1.0": version: 2.1.0 resolution: "unicode-match-property-value-ecmascript@npm:2.1.0" - checksum: 06661bc8aba2a60c7733a7044f3e13085808939ad17924ffd4f5222a650f88009eb7c09481dc9c15cfc593d4ad99bd1cde8d54042733b335672591a81c52601c + checksum: 10c0/f5b9499b9e0ffdc6027b744d528f17ec27dd7c15da03254ed06851feec47e0531f20d410910c8a49af4a6a190f4978413794c8d75ce112950b56d583b5d5c7f2 languageName: node linkType: hard "unicode-property-aliases-ecmascript@npm:^2.0.0": version: 2.1.0 resolution: "unicode-property-aliases-ecmascript@npm:2.1.0" - checksum: 243524431893649b62cc674d877bd64ef292d6071dd2fd01ab4d5ad26efbc104ffcd064f93f8a06b7e4ec54c172bf03f6417921a0d8c3a9994161fe1f88f815b + checksum: 10c0/50ded3f8c963c7785e48c510a3b7c6bc4e08a579551489aa0349680a35b1ceceec122e33b2b6c1b579d0be2250f34bb163ac35f5f8695fe10bbc67fb757f0af8 languageName: node linkType: hard @@ -13659,7 +13927,7 @@ __metadata: resolution: "unique-filename@npm:3.0.0" dependencies: unique-slug: "npm:^4.0.0" - checksum: 8e2f59b356cb2e54aab14ff98a51ac6c45781d15ceaab6d4f1c2228b780193dc70fae4463ce9e1df4479cb9d3304d7c2043a3fb905bdeca71cc7e8ce27e063df + checksum: 10c0/6363e40b2fa758eb5ec5e21b3c7fb83e5da8dcfbd866cc0c199d5534c42f03b9ea9ab069769cc388e1d7ab93b4eeef28ef506ab5f18d910ef29617715101884f languageName: node linkType: hard @@ -13668,7 +13936,7 @@ __metadata: resolution: "unique-slug@npm:4.0.0" dependencies: imurmurhash: "npm:^0.1.4" - checksum: 40912a8963fc02fb8b600cf50197df4a275c602c60de4cac4f75879d3c48558cfac48de08a25cc10df8112161f7180b3bbb4d662aadb711568602f9eddee54f0 + checksum: 10c0/cb811d9d54eb5821b81b18205750be84cb015c20a4a44280794e915f5a0a70223ce39066781a354e872df3572e8155c228f43ff0cce94c7cbf4da2cc7cbdd635 languageName: node linkType: hard @@ -13677,14 +13945,14 @@ __metadata: resolution: 
"unique-string@npm:2.0.0" dependencies: crypto-random-string: "npm:^2.0.0" - checksum: 107cae65b0b618296c2c663b8e52e4d1df129e9af04ab38d53b4f2189e96da93f599c85f4589b7ffaf1a11c9327cbb8a34f04c71b8d4950d3e385c2da2a93828 + checksum: 10c0/11820db0a4ba069d174bedfa96c588fc2c96b083066fafa186851e563951d0de78181ac79c744c1ed28b51f9d82ac5b8196ff3e4560d0178046ef455d8c2244b languageName: node linkType: hard "unist-util-is@npm:^4.0.0": version: 4.1.0 resolution: "unist-util-is@npm:4.1.0" - checksum: c046cc87c0a4f797b2afce76d917218e6a9af946a56cb5a88cb7f82be34f16c11050a10ddc4c66a3297dbb2782ca7d72a358cd77900b439ea9c683ba003ffe90 + checksum: 10c0/21ca3d7bacc88853b880b19cb1b133a056c501617d7f9b8cce969cd8b430ed7e1bc416a3a11b02540d5de6fb86807e169d00596108a459d034cf5faec97c055e languageName: node linkType: hard @@ -13694,7 +13962,7 @@ __metadata: dependencies: "@types/unist": "npm:^2.0.0" unist-util-is: "npm:^4.0.0" - checksum: 1b18343d88a0ad9cafaf8164ff8a1d3e3903328b3936b1565d61731f0b5778b9b9f400c455d3ad5284eeebcfdd7558ce24eb15c303a9cc0bd9218d01b2116923 + checksum: 10c0/231c80c5ba8e79263956fcaa25ed2a11ad7fe77ac5ba0d322e9d51bbc4238501e3bb52f405e518bcdc5471e27b33eff520db0aa4a3b1feb9fb6e2de6ae385d49 languageName: node linkType: hard @@ -13705,28 +13973,28 @@ __metadata: "@types/unist": "npm:^2.0.0" unist-util-is: "npm:^4.0.0" unist-util-visit-parents: "npm:^3.0.0" - checksum: 1fe19d500e212128f96d8c3cfa3312846e586b797748a1fd195fe6479f06bc90a6f6904deb08eefc00dd58e83a1c8a32fb8677252d2273ad7a5e624525b69b8f + checksum: 10c0/7b11303d82271ca53a2ced2d56c87a689dd518596c99ff4a11cdff750f5cc5c0e4b64b146bd2363557cb29443c98713bfd1e8dc6d1c3f9d474b9eb1f23a60888 languageName: node linkType: hard "universalify@npm:^0.2.0": version: 0.2.0 resolution: "universalify@npm:0.2.0" - checksum: e86134cb12919d177c2353196a4cc09981524ee87abf621f7bc8d249dbbbebaec5e7d1314b96061497981350df786e4c5128dbf442eba104d6e765bc260678b5 + checksum: 
10c0/cedbe4d4ca3967edf24c0800cfc161c5a15e240dac28e3ce575c689abc11f2c81ccc6532c8752af3b40f9120fb5e454abecd359e164f4f6aa44c29cd37e194fe languageName: node linkType: hard "universalify@npm:^2.0.0": version: 2.0.1 resolution: "universalify@npm:2.0.1" - checksum: ecd8469fe0db28e7de9e5289d32bd1b6ba8f7183db34f3bfc4ca53c49891c2d6aa05f3fb3936a81285a905cc509fb641a0c3fc131ec786167eff41236ae32e60 + checksum: 10c0/73e8ee3809041ca8b818efb141801a1004e3fc0002727f1531f4de613ea281b494a40909596dae4a042a4fb6cd385af5d4db2e137b1362e0e91384b828effd3a languageName: node linkType: hard "unpipe@npm:1.0.0, unpipe@npm:~1.0.0": version: 1.0.0 resolution: "unpipe@npm:1.0.0" - checksum: 4fa18d8d8d977c55cb09715385c203197105e10a6d220087ec819f50cb68870f02942244f1017565484237f1f8c5d3cd413631b1ae104d3096f24fdfde1b4aa2 + checksum: 10c0/193400255bd48968e5c5383730344fbb4fa114cdedfab26e329e50dd2d81b134244bb8a72c6ac1b10ab0281a58b363d06405632c9d49ca9dfd5e90cbd7d0f32c languageName: node linkType: hard @@ -13738,14 +14006,14 @@ __metadata: chokidar: "npm:^3.5.3" webpack-sources: "npm:^3.2.3" webpack-virtual-modules: "npm:^0.6.1" - checksum: cadee8d57d574b4b017c82e696c2ed03b9e90a13f8a3baad14261b6888b989f852ef91e401b6488c03886a4231250e61168f15ef89714d5760d729712c2d4064 + checksum: 10c0/4e358b4d45aeab6c654943edf63c0f4ad22831386eba414065c4b535c84ec4e295cca145f263f878059ea96e19c904835af25dd5f7f46f3c4a49302e621d3cab languageName: node linkType: hard "untildify@npm:^4.0.0": version: 4.0.0 resolution: "untildify@npm:4.0.0" - checksum: 39ced9c418a74f73f0a56e1ba4634b4d959422dff61f4c72a8e39f60b99380c1b45ed776fbaa0a4101b157e4310d873ad7d114e8534ca02609b4916bb4187fb9 + checksum: 10c0/d758e624c707d49f76f7511d75d09a8eda7f2020d231ec52b67ff4896bcf7013be3f9522d8375f57e586e9a2e827f5641c7e06ee46ab9c435fc2b2b2e9de517a languageName: node linkType: hard @@ -13759,7 +14027,7 @@ __metadata: browserslist: ">= 4.21.0" bin: update-browserslist-db: cli.js - checksum: 
9074b4ef34d2ed931f27d390aafdd391ee7c45ad83c508e8fed6aaae1eb68f81999a768ed8525c6f88d4001a4fbf1b8c0268f099d0e8e72088ec5945ac796acf + checksum: 10c0/e52b8b521c78ce1e0c775f356cd16a9c22c70d25f3e01180839c407a5dc787fb05a13f67560cbaf316770d26fa99f78f1acd711b1b54a4f35d4820d4ea7136e6 languageName: node linkType: hard @@ -13768,7 +14036,7 @@ __metadata: resolution: "uri-js@npm:4.4.1" dependencies: punycode: "npm:^2.1.0" - checksum: b271ca7e3d46b7160222e3afa3e531505161c9a4e097febae9664e4b59912f4cbe94861361a4175edac3a03fee99d91e44b6a58c17a634bc5a664b19fc76fbcb + checksum: 10c0/4ef57b45aa820d7ac6496e9208559986c665e49447cb072744c13b66925a362d96dd5a46c4530a6b8e203e5db5fe849369444440cb22ecfc26c679359e5dfa3c languageName: node linkType: hard @@ -13778,7 +14046,7 @@ __metadata: dependencies: querystringify: "npm:^2.1.1" requires-port: "npm:^1.0.0" - checksum: c9e96bc8c5b34e9f05ddfeffc12f6aadecbb0d971b3cc26015b58d5b44676a99f50d5aeb1e5c9e61fa4d49961ae3ab1ae997369ed44da51b2f5ac010d188e6ad + checksum: 10c0/bd5aa9389f896974beb851c112f63b466505a04b4807cea2e5a3b7092f6fbb75316f0491ea84e44f66fed55f1b440df5195d7e3a8203f64fcefa19d182f5be87 languageName: node linkType: hard @@ -13793,7 +14061,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: 7cc68dbd8bb9890e21366f153938988967f0a17168a215bf31e24519f826a2de7de596e981f016603a363362f736f2cffad05091c3857fcafbc9c3b20a3eef1e + checksum: 10c0/6666cd62e13053d03e453b5199037cb8f6475a8f55afd664ff488bd8f2ee2ede4da3b220dd7e60f5ecd4926133364fbf4b1aed463eeb8203e7c5be3b1533b59b languageName: node linkType: hard @@ -13805,7 +14073,7 @@ __metadata: peerDependencies: react: 16.8.0 - 18 react-dom: 16.8.0 - 18 - checksum: 821d3f783090e36c694ef0ae3e366b364a691a8254d04337700ea79757e01e2d79f307ee517487c9246db7e8bc9625b474dd6ac7dad18d777004dee817826080 + checksum: 10c0/6ccdeb09fe20566ec182b1635a22f189e13d46226b74610432590e69b31ef5d05d069badc3306ebd0d2bb608743b17981fb535763a1d7dc2c8ae462ee8e5999c languageName: node linkType: hard @@ 
-13821,7 +14089,7 @@ __metadata: peerDependenciesMeta: "@types/react": optional: true - checksum: ec99e31aefeb880f6dc4d02cb19a01d123364954f857811470ece32872f70d6c3eadbe4d073770706a9b7db6136f2a9fbf1bb803e07fbb21e936a47479281690 + checksum: 10c0/89f0018fd9aee1fc17c85ac18c4bf8944d460d453d0d0e04ddbc8eaddf3fa591e9c74a1f8a438a1bff368a7a2417fab380bdb3df899d2194c4375b0982736de0 languageName: node linkType: hard @@ -13830,14 +14098,14 @@ __metadata: resolution: "use-sync-external-store@npm:1.2.0" peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 - checksum: a676216affc203876bd47981103f201f28c2731361bb186367e12d287a7566763213a8816910c6eb88265eccd4c230426eb783d64c373c4a180905be8820ed8e + checksum: 10c0/ac4814e5592524f242921157e791b022efe36e451fe0d4fd4d204322d5433a4fc300d63b0ade5185f8e0735ded044c70bcf6d2352db0f74d097a238cebd2da02 languageName: node linkType: hard "util-deprecate@npm:^1.0.1, util-deprecate@npm:^1.0.2, util-deprecate@npm:~1.0.1": version: 1.0.2 resolution: "util-deprecate@npm:1.0.2" - checksum: 474acf1146cb2701fe3b074892217553dfcf9a031280919ba1b8d651a068c9b15d863b7303cb15bd00a862b498e6cf4ad7b4a08fb134edd5a6f7641681cb54a2 + checksum: 10c0/41a5bdd214df2f6c3ecf8622745e4a366c4adced864bc3c833739791aeeeb1838119af7daed4ba36428114b5c67dcda034a79c882e97e43c03e66a4dd7389942 languageName: node linkType: hard @@ -13850,14 +14118,14 @@ __metadata: is-generator-function: "npm:^1.0.7" is-typed-array: "npm:^1.1.3" which-typed-array: "npm:^1.1.2" - checksum: 61a10de7753353dd4d744c917f74cdd7d21b8b46379c1e48e1c4fd8e83f8190e6bd9978fc4e5102ab6a10ebda6019d1b36572fa4a325e175ec8b789a121f6147 + checksum: 10c0/c27054de2cea2229a66c09522d0fa1415fb12d861d08523a8846bf2e4cbf0079d4c3f725f09dcb87493549bcbf05f5798dce1688b53c6c17201a45759e7253f3 languageName: node linkType: hard "utils-merge@npm:1.0.1": version: 1.0.1 resolution: "utils-merge@npm:1.0.1" - checksum: 5d6949693d58cb2e636a84f3ee1c6e7b2f9c16cb1d42d0ecb386d8c025c69e327205aa1c69e2868cc06a01e5e20681fbba55a4e0ed0cce913d60334024eae798 
+ checksum: 10c0/02ba649de1b7ca8854bfe20a82f1dfbdda3fb57a22ab4a8972a63a34553cf7aa51bc9081cf7e001b035b88186d23689d69e71b510e610a09a4c66f68aa95b672 languageName: node linkType: hard @@ -13866,7 +14134,7 @@ __metadata: resolution: "uuid@npm:9.0.1" bin: uuid: dist/bin/uuid - checksum: 9d0b6adb72b736e36f2b1b53da0d559125ba3e39d913b6072f6f033e0c87835b414f0836b45bcfaf2bdf698f92297fea1c3cc19b0b258bc182c9c43cc0fab9f2 + checksum: 10c0/1607dd32ac7fc22f2d8f77051e6a64845c9bce5cd3dd8aa0070c074ec73e666a1f63c7b4e0f4bf2bc8b9d59dc85a15e17807446d9d2b17c8485fbc2147b27f9b languageName: node linkType: hard @@ -13877,7 +14145,7 @@ __metadata: "@jridgewell/trace-mapping": "npm:^0.3.12" "@types/istanbul-lib-coverage": "npm:^2.0.1" convert-source-map: "npm:^2.0.0" - checksum: 18dd8cebfb6790f27f4e41e7cff77c7ab1c8904085f354dd7875e2eb65f4261c4cf40939132502875779d92304bfea46b8336346ecb40b6f33c3a3979e6f5729 + checksum: 10c0/e691ba4dd0dea4a884e52c37dbda30cce6f9eeafe9b26721e449429c6bb0f4b6d1e33fabe7711d0f67f7a34c3bfd56c873f7375bba0b1534e6a2843ce99550e5 languageName: node linkType: hard @@ -13887,14 +14155,14 @@ __metadata: dependencies: spdx-correct: "npm:^3.0.0" spdx-expression-parse: "npm:^3.0.0" - checksum: 86242519b2538bb8aeb12330edebb61b4eb37fd35ef65220ab0b03a26c0592c1c8a7300d32da3cde5abd08d18d95e8dabfad684b5116336f6de9e6f207eec224 + checksum: 10c0/7b91e455a8de9a0beaa9fe961e536b677da7f48c9a493edf4d4d4a87fd80a7a10267d438723364e432c2fcd00b5650b5378275cded362383ef570276e6312f4f languageName: node linkType: hard "vary@npm:~1.1.2": version: 1.1.2 resolution: "vary@npm:1.1.2" - checksum: 31389debef15a480849b8331b220782230b9815a8e0dbb7b9a8369559aed2e9a7800cd904d4371ea74f4c3527db456dc8e7ac5befce5f0d289014dbdf47b2242 + checksum: 10c0/f15d588d79f3675135ba783c91a4083dcd290a2a5be9fcb6514220a1634e23df116847b1cc51f66bfb0644cf9353b2abb7815ae499bab06e46dd33c1a6bf1f4f languageName: node linkType: hard @@ -13908,7 +14176,7 @@ __metadata: cloneable-readable: "npm:^1.0.0" remove-trailing-separator: "npm:^1.0.1" 
replace-ext: "npm:^1.0.0" - checksum: 6f7c034381afbfd2fd3d09d75a7275f232a00e623f84e9f7fd3569015110f7d03b7535e6c9e6dd0166e1cee6d490182a25aa17a95db1c6aab6d066561466fb49 + checksum: 10c0/e7073fe5a3e10bbd5a3abe7ccf3351ed1b784178576b09642c08b0ef4056265476610aabd29eabfaaf456ada45f05f4112a35687d502f33aab33b025fc6ec38f languageName: node linkType: hard @@ -13922,7 +14190,7 @@ __metadata: peerDependencies: eslint: ">=7" vite: ">=2" - checksum: 65598893e2063a287a690ae296ba1fc212ee50dbc810d396a6cda44ee60c6f400adb52fc4c4a5ff54a89c11100bbaaf43eada23c9a78654ab1717f097d78176f + checksum: 10c0/123c3dcf8229fe2104f139877e866c1a7fc21903dc09f80bebb319a29929667074b9db6d89b3c48eea4740567a07c875d13c4c863ccf7a30a6c9621c74a5c37a languageName: node linkType: hard @@ -13931,7 +14199,7 @@ __metadata: resolution: "vite-plugin-react-remove-attributes@npm:1.0.3" peerDependencies: vite: ^2.4.4 - checksum: 4ffda1ac666128caaef33de2634696df9f8f18f92af9909abddfec5c1b0929bb4061003af72c24fac6c0881200cefca54184b60fd52b48ce2c3738d40761ecd9 + checksum: 10c0/aed62844d7aff39dae90e85a4a9f76dfe398acf14b0bc545e909cd10d30edc08a006f77eab3397ffbbeb25db89e46224aeae8ab3789c30dedffad12e92d1a834 languageName: node linkType: hard @@ -13940,7 +14208,7 @@ __metadata: resolution: "vite-plugin-svg-spriter@npm:1.0.0" dependencies: svg-sprite: "npm:^2.0.2" - checksum: 5b2c821e8917237da4503a930dae8a5d51b4c1f312e1825f51a68db73cc1538d4ca8f2129ee4a0521974ff3d183584d9f40b0c53c2304eec55a55f135ed97805 + checksum: 10c0/ecda7c2f0141752caaade28b73fca57a1894e4e2616061aea61d8e71b2a7f562b33b0b6bd9b2b4ae13b85e4d04192bed8909aed3a382d73ab5d57d8a16bc3879 languageName: node linkType: hard @@ -13953,7 +14221,7 @@ __metadata: "@svgr/plugin-jsx": "npm:^7.0.0" peerDependencies: vite: ^2.6.0 || 3 || 4 - checksum: 19887e1db910ecdd6c12645e430d9e1d9ad40fe6945d3f7de68fc235fba0277586deffa47db1a6be2fa511207b01893a3c5bad9d1bd558ca28971feca13ecd9a + checksum: 
10c0/f801759810be82e997acb26b6b0f8c6dc012d7bcb4d430e1e75ef210f6f05580c589b7f65c9729fe4993fa919433903b71a74ddfc490e41af69720cf857de9d9 languageName: node linkType: hard @@ -13969,7 +14237,7 @@ __metadata: peerDependenciesMeta: vite: optional: true - checksum: 8550650c5f5b203e13f4975f6bfd26389000175a031be8f408f6b5d619d5fd8430e067a0661599a4d0872c431496b81239e5d5ad69d9c4b4f9ca7efd4b874bb1 + checksum: 10c0/04bd792bb4f6b4fb57ec8368cff076abffba8d6923af032affb14be43b6e2dfd8b25085947a3204d702a8c8e9d79d3c361373cf98566df682420728857906289 languageName: node linkType: hard @@ -14009,7 +14277,7 @@ __metadata: optional: true bin: vite: bin/vite.js - checksum: d4827cdd41c8bb69c5071200a88015fa3c4d29082c46fd9f20db8df01f7b4948042e4b2e67b6a4391c0bdbfc6c4925832cc6b2411661518988c9a25d4b887598 + checksum: 10c0/28e616abc86038aa73d856b77525193bd77d6ac7fe49f699a3b2a93586613d5f109f07c9917c294faa23b939d07e181c55f9a33392109d278550cc0c87a2d5a3 languageName: node linkType: hard @@ -14018,7 +14286,7 @@ __metadata: resolution: "w3c-xmlserializer@npm:4.0.0" dependencies: xml-name-validator: "npm:^4.0.0" - checksum: 9a00c412b5496f4f040842c9520bc0aaec6e0c015d06412a91a723cd7d84ea605ab903965f546b4ecdb3eae267f5145ba08565222b1d6cb443ee488cda9a0aee + checksum: 10c0/02cc66d6efc590bd630086cd88252444120f5feec5c4043932b0d0f74f8b060512f79dc77eb093a7ad04b4f02f39da79ce4af47ceb600f2bf9eacdc83204b1a8 languageName: node linkType: hard @@ -14027,7 +14295,7 @@ __metadata: resolution: "walker@npm:1.0.8" dependencies: makeerror: "npm:1.0.12" - checksum: ad7a257ea1e662e57ef2e018f97b3c02a7240ad5093c392186ce0bcf1f1a60bbadd520d073b9beb921ed99f64f065efb63dfc8eec689a80e569f93c1c5d5e16c + checksum: 10c0/a17e037bccd3ca8a25a80cb850903facdfed0de4864bd8728f1782370715d679fa72e0a0f5da7c1c1379365159901e5935f35be531229da53bbfc0efdabdb48e languageName: node linkType: hard @@ -14037,7 +14305,7 @@ __metadata: dependencies: glob-to-regexp: "npm:^0.4.1" graceful-fs: "npm:^4.1.2" - checksum: 
4280b45bc4b5d45d5579113f2a4af93b67ae1b9607cc3d86ae41cdd53ead10db5d9dc3237f24256d05ef88b28c69a02712f78e434cb7ecc8edaca134a56e8cab + checksum: 10c0/c5e35f9fb9338d31d2141d9835643c0f49b5f9c521440bb648181059e5940d93dd8ed856aa8a33fbcdd4e121dad63c7e8c15c063cf485429cd9d427be197fe62 languageName: node linkType: hard @@ -14046,35 +14314,35 @@ __metadata: resolution: "wcwidth@npm:1.0.1" dependencies: defaults: "npm:^1.0.3" - checksum: 182ebac8ca0b96845fae6ef44afd4619df6987fe5cf552fdee8396d3daa1fb9b8ec5c6c69855acb7b3c1231571393bd1f0a4cdc4028d421575348f64bb0a8817 + checksum: 10c0/5b61ca583a95e2dd85d7078400190efd452e05751a64accb8c06ce4db65d7e0b0cde9917d705e826a2e05cc2548f61efde115ffa374c3e436d04be45c889e5b4 languageName: node linkType: hard "webidl-conversions@npm:^3.0.0": version: 3.0.1 resolution: "webidl-conversions@npm:3.0.1" - checksum: b65b9f8d6854572a84a5c69615152b63371395f0c5dcd6729c45789052296df54314db2bc3e977df41705eacb8bc79c247cee139a63fa695192f95816ed528ad + checksum: 10c0/5612d5f3e54760a797052eb4927f0ddc01383550f542ccd33d5238cfd65aeed392a45ad38364970d0a0f4fea32e1f4d231b3d8dac4a3bdd385e5cf802ae097db languageName: node linkType: hard "webidl-conversions@npm:^7.0.0": version: 7.0.0 resolution: "webidl-conversions@npm:7.0.0" - checksum: 4c4f65472c010eddbe648c11b977d048dd96956a625f7f8b9d64e1b30c3c1f23ea1acfd654648426ce5c743c2108a5a757c0592f02902cf7367adb7d14e67721 + checksum: 10c0/228d8cb6d270c23b0720cb2d95c579202db3aaf8f633b4e9dd94ec2000a04e7e6e43b76a94509cdb30479bd00ae253ab2371a2da9f81446cc313f89a4213a2c4 languageName: node linkType: hard "webpack-sources@npm:^3.2.3": version: 3.2.3 resolution: "webpack-sources@npm:3.2.3" - checksum: a661f41795d678b7526ae8a88cd1b3d8ce71a7d19b6503da8149b2e667fc7a12f9b899041c1665d39e38245ed3a59ab68de648ea31040c3829aa695a5a45211d + checksum: 10c0/2ef63d77c4fad39de4a6db17323d75eb92897b32674e97d76f0a1e87c003882fc038571266ad0ef581ac734cbe20952912aaa26155f1905e96ce251adbb1eb4e languageName: node linkType: hard 
"webpack-virtual-modules@npm:^0.6.1": version: 0.6.1 resolution: "webpack-virtual-modules@npm:0.6.1" - checksum: 12a43ecdb910185c9d7e4ec19cc3b13bff228dae362e8a487c0bd292b393555e017ad16f771d5ce5b692d91d65b71a7bcd64763958d39066a5351ea325395539 + checksum: 10c0/696bdc1acf3806374bdeb4b9b9856b79ee70b31e92f325dfab9b8c8c7e14bb6ddffa9f895a214770c4fb8fea45a21f34ca64310f74e877292a90f4a9966c9c2f languageName: node linkType: hard @@ -14083,14 +14351,14 @@ __metadata: resolution: "whatwg-encoding@npm:2.0.0" dependencies: iconv-lite: "npm:0.6.3" - checksum: 162d712d88fd134a4fe587e53302da812eb4215a1baa4c394dfd86eff31d0a079ff932c05233857997de07481093358d6e7587997358f49b8a580a777be22089 + checksum: 10c0/91b90a49f312dc751496fd23a7e68981e62f33afe938b97281ad766235c4872fc4e66319f925c5e9001502b3040dd25a33b02a9c693b73a4cbbfdc4ad10c3e3e languageName: node linkType: hard "whatwg-mimetype@npm:^3.0.0": version: 3.0.0 resolution: "whatwg-mimetype@npm:3.0.0" - checksum: 96f9f628c663c2ae05412c185ca81b3df54bcb921ab52fe9ebc0081c1720f25d770665401eb2338ab7f48c71568133845638e18a81ed52ab5d4dcef7d22b40ef + checksum: 10c0/323895a1cda29a5fb0b9ca82831d2c316309fede0365047c4c323073e3239067a304a09a1f4b123b9532641ab604203f33a1403b5ca6a62ef405bcd7a204080f languageName: node linkType: hard @@ -14100,7 +14368,7 @@ __metadata: dependencies: tr46: "npm:^3.0.0" webidl-conversions: "npm:^7.0.0" - checksum: dfcd51c6f4bfb54685528fb10927f3fd3d7c809b5671beef4a8cdd7b1408a7abf3343a35bc71dab83a1424f1c1e92cc2700d7930d95d231df0fac361de0c7648 + checksum: 10c0/f7ec264976d7c725e0696fcaf9ebe056e14422eacbf92fdbb4462034609cba7d0c85ffa1aab05e9309d42969bcf04632ba5ed3f3882c516d7b093053315bf4c1 languageName: node linkType: hard @@ -14110,7 +14378,7 @@ __metadata: dependencies: tr46: "npm:~0.0.3" webidl-conversions: "npm:^3.0.0" - checksum: f95adbc1e80820828b45cc671d97da7cd5e4ef9deb426c31bcd5ab00dc7103042291613b3ef3caec0a2335ed09e0d5ed026c940755dbb6d404e2b27f940fdf07 + checksum: 
10c0/1588bed84d10b72d5eec1d0faa0722ba1962f1821e7539c535558fb5398d223b0c50d8acab950b8c488b4ba69043fd833cc2697056b167d8ad46fac3995a55d5 languageName: node linkType: hard @@ -14123,7 +14391,7 @@ __metadata: is-number-object: "npm:^1.0.4" is-string: "npm:^1.0.5" is-symbol: "npm:^1.0.3" - checksum: 9c7ca7855255f25ac47f4ce8b59c4cc33629e713fd7a165c9d77a2bb47bf3d9655a5664660c70337a3221cf96742f3589fae15a3a33639908d33e29aa2941efb + checksum: 10c0/0a62a03c00c91dd4fb1035b2f0733c341d805753b027eebd3a304b9cb70e8ce33e25317add2fe9b5fea6f53a175c0633ae701ff812e604410ddd049777cd435e languageName: node linkType: hard @@ -14143,7 +14411,7 @@ __metadata: which-boxed-primitive: "npm:^1.0.2" which-collection: "npm:^1.0.1" which-typed-array: "npm:^1.1.9" - checksum: d7823c4a6aa4fc8183eb572edd9f9ee2751e5f3ba2ccd5b298cc163f720df0f02ee1a5291d18ca8a41d48144ef40007ff6a64e6f5e7c506527086c7513a5f673 + checksum: 10c0/2b7b234df3443b52f4fbd2b65b731804de8d30bcc4210ec84107ef377a81923cea7f2763b7fb78b394175cea59118bf3c41b9ffd2d643cb1d748ef93b33b6bd4 languageName: node linkType: hard @@ -14155,7 +14423,7 @@ __metadata: is-set: "npm:^2.0.1" is-weakmap: "npm:^2.0.1" is-weakset: "npm:^2.0.1" - checksum: 85c95fcf92df7972ce66bed879e53d9dc752a30ef08e1ca4696df56bcf1c302e3b9965a39b04a20fa280a997fad6c170eb0b4d62435569b7f6c0bc7be910572b + checksum: 10c0/249f913e1758ed2f06f00706007d87dc22090a80591a56917376e70ecf8fc9ab6c41d98e1c87208bb9648676f65d4b09c0e4d23c56c7afb0f0a73a27d701df5d languageName: node linkType: hard @@ -14168,7 +14436,7 @@ __metadata: for-each: "npm:^0.3.3" gopd: "npm:^1.0.1" has-tostringtag: "npm:^1.0.1" - checksum: 56253d2c9d6b41b8a4af96d8c2751bac5508906bd500cdcd0dc5301fb082de0391a4311ab21258bc8d2609ed593f422c1a66f0020fcb3a1e97f719bc928b9018 + checksum: 10c0/0960f1e77807058819451b98c51d4cd72031593e8de990b24bd3fc22e176f5eee22921d68d852297c786aec117689f0423ed20aa4fde7ce2704d680677891f56 languageName: node linkType: hard @@ -14179,7 +14447,7 @@ __metadata: isexe: "npm:^2.0.0" bin: node-which: 
./bin/node-which - checksum: 4782f8a1d6b8fc12c65e968fea49f59752bf6302dc43036c3bf87da718a80710f61a062516e9764c70008b487929a73546125570acea95c5b5dcc8ac3052c70f + checksum: 10c0/66522872a768b60c2a65a57e8ad184e5372f5b6a9ca6d5f033d4b0dc98aff63995655a7503b9c0a2598936f532120e81dd8cc155e2e92ed662a2b9377cc4374f languageName: node linkType: hard @@ -14190,7 +14458,7 @@ __metadata: isexe: "npm:^3.1.1" bin: node-which: bin/which.js - checksum: f17e84c042592c21e23c8195108cff18c64050b9efb8459589116999ea9da6dd1509e6a1bac3aeebefd137be00fabbb61b5c2bc0aa0f8526f32b58ee2f545651 + checksum: 10c0/449fa5c44ed120ccecfe18c433296a4978a7583bf2391c50abce13f76878d2476defde04d0f79db8165bdf432853c1f8389d0485ca6e8ebce3bbcded513d5e6a languageName: node linkType: hard @@ -14201,7 +14469,7 @@ __metadata: logform: "npm:^2.3.2" readable-stream: "npm:^3.6.0" triple-beam: "npm:^1.3.0" - checksum: c8eae7b110e68396edcf26aec86608bd8ac98f3cc05961064e2e577b023d9c4aa485546cacba84efaf48b7d6b1e282dc211fd959ee16cbd31d34476d96daea43 + checksum: 10c0/cd16f3d0ab56697f93c4899e0eb5f89690f291bb6cf309194819789326a7c7ed943ef00f0b2fab513b114d371314368bde1a7ae6252ad1516181a79f90199cd2 languageName: node linkType: hard @@ -14220,14 +14488,14 @@ __metadata: stack-trace: "npm:0.0.x" triple-beam: "npm:^1.3.0" winston-transport: "npm:^4.7.0" - checksum: 436675598359af27e4eabde2ce578cf77da893ffd57d0479f037fef939e8eb721031f0102b14399eee93b3412b545946c431d1fff23db3beeac2ffa395537f7b + checksum: 10c0/2c3cc7389a691e1638edcb0d4bfea72caa82d87d5681ec6131ac9bae780d94d06fb7b112edcd4ec37c8b947a1b64943941b761e34d67c6b0dac6e9c31ae4b25b languageName: node linkType: hard "wordwrap@npm:^1.0.0": version: 1.0.0 resolution: "wordwrap@npm:1.0.0" - checksum: 497d40beb2bdb08e6d38754faa17ce20b0bf1306327f80cb777927edb23f461ee1f6bc659b3c3c93f26b08e1cf4b46acc5bae8fda1f0be3b5ab9a1a0211034cd + checksum: 10c0/7ed2e44f3c33c5c3e3771134d2b0aee4314c9e49c749e37f464bf69f2bcdf0cbf9419ca638098e2717cff4875c47f56a007532f6111c3319f557a2ca91278e92 languageName: node 
linkType: hard @@ -14238,7 +14506,7 @@ __metadata: ansi-styles: "npm:^4.0.0" string-width: "npm:^4.1.0" strip-ansi: "npm:^6.0.0" - checksum: cebdaeca3a6880da410f75209e68cd05428580de5ad24535f22696d7d9cab134d1f8498599f344c3cf0fb37c1715807a183778d8c648d6cc0cb5ff2bb4236540 + checksum: 10c0/d15fc12c11e4cbc4044a552129ebc75ee3f57aa9c1958373a4db0292d72282f54373b536103987a4a7594db1ef6a4f10acf92978f79b98c49306a4b58c77d4da languageName: node linkType: hard @@ -14249,14 +14517,25 @@ __metadata: ansi-styles: "npm:^6.1.0" string-width: "npm:^5.0.1" strip-ansi: "npm:^7.0.1" - checksum: 7b1e4b35e9bb2312d2ee9ee7dc95b8cb5f8b4b5a89f7dde5543fe66c1e3715663094defa50d75454ac900bd210f702d575f15f3f17fa9ec0291806d2578d1ddf + checksum: 10c0/138ff58a41d2f877eae87e3282c0630fc2789012fc1af4d6bd626eeb9a2f9a65ca92005e6e69a75c7b85a68479fe7443c7dbe1eb8fbaa681a4491364b7c55c60 + languageName: node + linkType: hard + +"wrap-ansi@npm:^9.0.0": + version: 9.0.0 + resolution: "wrap-ansi@npm:9.0.0" + dependencies: + ansi-styles: "npm:^6.2.1" + string-width: "npm:^7.0.0" + strip-ansi: "npm:^7.1.0" + checksum: 10c0/a139b818da9573677548dd463bd626a5a5286271211eb6e4e82f34a4f643191d74e6d4a9bb0a3c26ec90e6f904f679e0569674ac099ea12378a8b98e20706066 languageName: node linkType: hard "wrappy@npm:1": version: 1.0.2 resolution: "wrappy@npm:1.0.2" - checksum: 159da4805f7e84a3d003d8841557196034155008f817172d4e986bd591f74aa82aa7db55929a54222309e01079a65a92a9e6414da5a6aa4b01ee44a511ac3ee5 + checksum: 10c0/56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 languageName: node linkType: hard @@ -14267,7 +14546,7 @@ __metadata: graceful-fs: "npm:^4.1.11" imurmurhash: "npm:^0.1.4" signal-exit: "npm:^3.0.2" - checksum: 15ce863dce07075d0decedd7c9094f4461e46139d28a758c53162f24c0791c16cd2e7a76baa5b47b1a851fbb51e16f2fab739afb156929b22628f3225437135c + checksum: 
10c0/8cb4bba0c1ab814a9b127844da0db4fb8c5e06ddbe6317b8b319377c73b283673036c8b9360120062898508b9428d81611cf7fa97584504a00bc179b2a580b92 languageName: node linkType: hard @@ -14277,7 +14556,7 @@ __metadata: dependencies: imurmurhash: "npm:^0.1.4" signal-exit: "npm:^3.0.7" - checksum: 3be1f5508a46c190619d5386b1ac8f3af3dbe951ed0f7b0b4a0961eed6fc626bd84b50cf4be768dabc0a05b672f5d0c5ee7f42daa557b14415d18c3a13c7d246 + checksum: 10c0/a2c282c95ef5d8e1c27b335ae897b5eca00e85590d92a3fd69a437919b7b93ff36a69ea04145da55829d2164e724bc62202cdb5f4b208b425aba0807889375c7 languageName: node linkType: hard @@ -14286,7 +14565,7 @@ __metadata: resolution: "ws@npm:6.2.2" dependencies: async-limiter: "npm:~1.0.0" - checksum: bb791ac02ad7e59fd4208cc6dd3a5bf7a67dff4611a128ed33365996f9fc24fa0d699043559f1798b4bc8045639fd21a1fd3ceca81de560124444abd8e321afc + checksum: 10c0/d628a1e95668a296644b4f51ce5debb43d9f1d89ebb2e32fef205a685b9439378eb824d60ce3a40bbc3bad0e887d84a56b343f2076f48d74f17c4c0800c42967 languageName: node linkType: hard @@ -14301,63 +14580,72 @@ __metadata: optional: true utf-8-validate: optional: true - checksum: 7c511c59e979bd37b63c3aea4a8e4d4163204f00bd5633c053b05ed67835481995f61a523b0ad2b603566f9a89b34cb4965cb9fab9649fbfebd8f740cea57f17 + checksum: 10c0/a7783bb421c648b1e622b423409cb2a58ac5839521d2f689e84bc9dc41d59379c692dd405b15a997ea1d4c0c2e5314ad707332d0c558f15232d2bc07c0b4618a languageName: node linkType: hard "xml-name-validator@npm:^4.0.0": version: 4.0.0 resolution: "xml-name-validator@npm:4.0.0" - checksum: f9582a3f281f790344a471c207516e29e293c6041b2c20d84dd6e58832cd7c19796c47e108fd4fd4b164a5e72ad94f2268f8ace8231cde4a2c6428d6aa220f92 + checksum: 10c0/c1bfa219d64e56fee265b2bd31b2fcecefc063ee802da1e73bad1f21d7afd89b943c9e2c97af2942f60b1ad46f915a4c81e00039c7d398b53cf410e29d3c30bd languageName: node linkType: hard "xmlchars@npm:^2.2.0": version: 2.2.0 resolution: "xmlchars@npm:2.2.0" - checksum: 
4ad5924974efd004a47cce6acf5c0269aee0e62f9a805a426db3337af7bcbd331099df174b024ace4fb18971b8a56de386d2e73a1c4b020e3abd63a4a9b917f1 + checksum: 10c0/b64b535861a6f310c5d9bfa10834cf49127c71922c297da9d4d1b45eeaae40bf9b4363275876088fbe2667e5db028d2cd4f8ee72eed9bede840a67d57dab7593 languageName: node linkType: hard "xpath@npm:^0.0.34": version: 0.0.34 resolution: "xpath@npm:0.0.34" - checksum: 77ce03c4494dab97b70fa443761c35a6bd484538a449714b981387a532a6eb22e245b29164f5d8a4a82f4f3cfd71d27ba71d09ed2b6fe933654585c6e46c0a25 + checksum: 10c0/88335108884ca164421f7fed048ef1a18ab3f7b1ae446b627fd3f51fc2396dcce798601c5e426de3bbd55d5940b84cf2326c75cd76620c1b49491283b85de17a languageName: node linkType: hard "xtend@npm:^4.0.0, xtend@npm:~4.0.1": version: 4.0.2 resolution: "xtend@npm:4.0.2" - checksum: ac5dfa738b21f6e7f0dd6e65e1b3155036d68104e67e5d5d1bde74892e327d7e5636a076f625599dc394330a731861e87343ff184b0047fef1360a7ec0a5a36a + checksum: 10c0/366ae4783eec6100f8a02dff02ac907bf29f9a00b82ac0264b4d8b832ead18306797e283cf19de776538babfdcb2101375ec5646b59f08c52128ac4ab812ed0e languageName: node linkType: hard "y18n@npm:^5.0.5": version: 5.0.8 resolution: "y18n@npm:5.0.8" - checksum: 5f1b5f95e3775de4514edbb142398a2c37849ccfaf04a015be5d75521e9629d3be29bd4432d23c57f37e5b61ade592fb0197022e9993f81a06a5afbdcda9346d + checksum: 10c0/4df2842c36e468590c3691c894bc9cdbac41f520566e76e24f59401ba7d8b4811eb1e34524d57e54bc6d864bcb66baab7ffd9ca42bf1eda596618f9162b91249 languageName: node linkType: hard "yallist@npm:^3.0.2": version: 3.1.1 resolution: "yallist@npm:3.1.1" - checksum: 9af0a4329c3c6b779ac4736c69fae4190ac03029fa27c1aef4e6bcc92119b73dea6fe5db5fe881fb0ce2a0e9539a42cdf60c7c21eda04d1a0b8c082e38509efb + checksum: 10c0/c66a5c46bc89af1625476f7f0f2ec3653c1a1791d2f9407cfb4c2ba812a1e1c9941416d71ba9719876530e3340a99925f697142989371b72d93b9ee628afd8c1 languageName: node linkType: hard "yallist@npm:^4.0.0": version: 4.0.0 resolution: "yallist@npm:4.0.0" - checksum: 
4cb02b42b8a93b5cf50caf5d8e9beb409400a8a4d85e83bb0685c1457e9ac0b7a00819e9f5991ac25ffabb56a78e2f017c1acc010b3a1babfe6de690ba531abd + checksum: 10c0/2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a + languageName: node + linkType: hard + +"yaml@npm:~2.5.0": + version: 2.5.1 + resolution: "yaml@npm:2.5.1" + bin: + yaml: bin.mjs + checksum: 10c0/40fba5682898dbeeb3319e358a968fe886509fab6f58725732a15f8dda3abac509f91e76817c708c9959a15f786f38ff863c1b88062d7c1162c5334a7d09cb4a languageName: node linkType: hard "yargs-parser@npm:^21.1.1": version: 21.1.1 resolution: "yargs-parser@npm:21.1.1" - checksum: 9dc2c217ea3bf8d858041252d43e074f7166b53f3d010a8c711275e09cd3d62a002969a39858b92bbda2a6a63a585c7127014534a560b9c69ed2d923d113406e + checksum: 10c0/f84b5e48169479d2f402239c59f084cfd1c3acc197a05c59b98bab067452e6b3ea46d4dd8ba2985ba7b3d32a343d77df0debd6b343e5dae3da2aab2cdf5886b2 languageName: node linkType: hard @@ -14372,7 +14660,7 @@ __metadata: string-width: "npm:^4.2.3" y18n: "npm:^5.0.5" yargs-parser: "npm:^21.1.1" - checksum: abb3e37678d6e38ea85485ed86ebe0d1e3464c640d7d9069805ea0da12f69d5a32df8e5625e370f9c96dd1c2dc088ab2d0a4dd32af18222ef3c4224a19471576 + checksum: 10c0/ccd7e723e61ad5965fffbb791366db689572b80cca80e0f96aad968dfff4156cd7cd1ad18607afe1046d8241e6fb2d6c08bf7fa7bfb5eaec818735d8feac8f05 languageName: node linkType: hard @@ -14382,13 +14670,13 @@ __metadata: dependencies: buffer-crc32: "npm:~0.2.3" fd-slicer: "npm:~1.1.0" - checksum: 1e4c311050dc0cf2ee3dbe8854fe0a6cde50e420b3e561a8d97042526b4cf7a0718d6c8d89e9e526a152f4a9cec55bcea9c3617264115f48bd6704cf12a04445 + checksum: 10c0/f265002af7541b9ec3589a27f5fb8f11cf348b53cc15e2751272e3c062cd73f3e715bc72d43257de71bbaecae446c3f1b14af7559e8ab0261625375541816422 languageName: node linkType: hard "yocto-queue@npm:^0.1.0": version: 0.1.0 resolution: "yocto-queue@npm:0.1.0" - checksum: 
f77b3d8d00310def622123df93d4ee654fc6a0096182af8bd60679ddcdfb3474c56c6c7190817c84a2785648cdee9d721c0154eb45698c62176c322fb46fc700 + checksum: 10c0/dceb44c28578b31641e13695d200d34ec4ab3966a5729814d5445b194933c096b7ced71494ce53a0e8820685d1d010df8b2422e5bf2cdea7e469d97ffbea306f languageName: node linkType: hard diff --git a/adcm-web/docker/test/Dockerfile b/adcm-web/docker/test/Dockerfile index 978eafb1be..34870eed36 100644 --- a/adcm-web/docker/test/Dockerfile +++ b/adcm-web/docker/test/Dockerfile @@ -1,4 +1,4 @@ -FROM node:18.16-alpine +FROM node:20.9.0-alpine WORKDIR /app From 680c6558a12490f257b7108192e1efbb38c9727f Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 8 Oct 2024 12:59:23 +0000 Subject: [PATCH 88/98] ADCM-6011 Relink /usr/bin/python --- Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Dockerfile b/Dockerfile index a15a42ff0f..91ef0f4b31 100644 --- a/Dockerfile +++ b/Dockerfile @@ -50,6 +50,10 @@ RUN apk add --no-cache --virtual .build-deps \ build-base \ linux-headers \ libffi-dev && \ + # remove python links (3.12) from /usr/bin and link python to local one (3.10) + rm /usr/bin/python /usr/bin/python3 && \ + ln -s /usr/local/bin/python3 /usr/bin/python3 && \ + ln -s /usr/bin/python3 /usr/bin/python && \ python -m venv $POETRY_VENV && \ $POETRY_VENV/bin/pip install --no-cache-dir -U pip setuptools && \ $POETRY_VENV/bin/pip install --no-cache-dir poetry==$POETRY_VERSION && \ From bc704669a8a7daf4a32c8d2a1fef3eb23b0ca9fd Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 8 Oct 2024 13:00:16 +0000 Subject: [PATCH 89/98] ADCM-5997 Fix config issue check & add concern (dis)appearance notifications --- python/api/tests/test_adcm.py | 12 ++---------- python/cm/services/concern/checks.py | 2 +- python/cm/services/mapping.py | 22 ++++++++++++++++++---- python/cm/upgrade.py | 5 ++++- 4 files changed, 25 insertions(+), 16 deletions(-) diff --git a/python/api/tests/test_adcm.py b/python/api/tests/test_adcm.py index 
2933810b52..cf6130b8b4 100644 --- a/python/api/tests/test_adcm.py +++ b/python/api/tests/test_adcm.py @@ -11,11 +11,7 @@ # limitations under the License. from adcm.tests.base import BaseTestCase -from cm.converters import orm_object_to_core_type -from cm.issue import add_concern_to_object -from cm.models import ADCM, ConcernCause -from cm.services.concern import create_issue -from core.types import CoreObjectDescriptor +from cm.models import ADCM from django.urls import reverse from rest_framework.response import Response from rest_framework.status import HTTP_200_OK @@ -26,11 +22,7 @@ def setUp(self) -> None: super().setUp() self.adcm = ADCM.objects.select_related("prototype").last() - self.concern = create_issue( - owner=CoreObjectDescriptor(id=self.adcm.id, type=orm_object_to_core_type(self.adcm)), - cause=ConcernCause.CONFIG, - ) - add_concern_to_object(object_=self.adcm, concern=self.concern) + self.concern = self.adcm.concerns.get() def test_list(self): test_data = { diff --git a/python/cm/services/concern/checks.py b/python/cm/services/concern/checks.py index a31dd4dc91..73002fd81b 100644 --- a/python/cm/services/concern/checks.py +++ b/python/cm/services/concern/checks.py @@ -111,7 +111,7 @@ def filter_objects_with_configuration_issues(config_spec: FlatSpec, *objects: Ob group_name = None if group_name: - if not attr.get(group_name, {}).get("active", False): + if not attr.get(group_name, {}).get("active", True): continue if config[group_name][field_name] is None: diff --git a/python/cm/services/mapping.py b/python/cm/services/mapping.py index 23a9b39f0f..2e61610d93 100644 --- a/python/cm/services/mapping.py +++ b/python/cm/services/mapping.py @@ -170,8 +170,6 @@ def _recreate_mapping_in_db(topology: ClusterTopology) -> None: def _update_concerns( old_topology: ClusterTopology, new_topology: ClusterTopology, bundle_restrictions: BundleRestrictions ) -> tuple[AffectedObjectConcernMap, AffectedObjectConcernMap]: - # todo HC may break (?) 
- # We can't be sure this method is called after some sort of "check" cluster = CoreObjectDescriptor(id=old_topology.cluster_id, type=ADCMCoreType.CLUSTER) if not cluster_mapping_has_issue(cluster_id=cluster.id, bundle_restrictions=bundle_restrictions): delete_issue(owner=cluster, cause=ConcernCause.HOSTCOMPONENT) @@ -193,6 +191,15 @@ def _update_concerns( targets=(CoreObjectDescriptor(id=host_id, type=ADCMCoreType.HOST) for host_id in unmapped), lock_id=lock.id, ) + if ADCMCoreType.HOST not in removed: + removed[ADCMCoreType.HOST] = {host_id: {lock.id} for host_id in unmapped} + else: + hosts_node = removed[ADCMCoreType.HOST] + for host_id in unmapped: + if host_id in hosts_node: + hosts_node[host_id].add(lock.id) + else: + hosts_node[host_id] = {lock.id} mapped = unmapped_in_previous_topology - unmapped_in_new_topology if mapped: @@ -200,9 +207,16 @@ def _update_concerns( targets=(CoreObjectDescriptor(id=host_id, type=ADCMCoreType.HOST) for host_id in mapped), lock_id=lock.id, ) + if ADCMCoreType.HOST not in added: + added[ADCMCoreType.HOST] = {host_id: {lock.id} for host_id in mapped} + else: + hosts_node = added[ADCMCoreType.HOST] + for host_id in mapped: + if host_id in hosts_node: + hosts_node[host_id].add(lock.id) + else: + hosts_node[host_id] = {lock.id} - # since mechanism for locks redistribution is different from the one for flags/issues, - # there's no need in considering them in concern update events return added, removed diff --git a/python/cm/upgrade.py b/python/cm/upgrade.py index ab3ac576ae..cc12e0a82f 100644 --- a/python/cm/upgrade.py +++ b/python/cm/upgrade.py @@ -73,7 +73,7 @@ from cm.services.job.action import ActionRunPayload, run_action from cm.services.job.types import HcAclAction from cm.services.mapping import change_host_component_mapping, check_nothing -from cm.status_api import send_prototype_and_state_update_event +from cm.status_api import notify_about_redistributed_concerns_from_maps, send_prototype_and_state_update_event from 
cm.utils import obj_ref @@ -512,6 +512,9 @@ def perform(self) -> None: ): policy.apply() + if added or removed: + notify_about_redistributed_concerns_from_maps(added=added, removed=removed) + logger.info("upgrade %s OK to version %s", obj_ref(obj=self._target), new_prototype.version) @abstractmethod From 0d608c7dc107a9361c00ec12fb3fcb0fe650d224 Mon Sep 17 00:00:00 2001 From: Kirill Fedorenko Date: Tue, 8 Oct 2024 19:31:43 +0000 Subject: [PATCH 90/98] ADCM-5996 [UI] HC map can be saved with expand on host in MM https://tracker.yandex.ru/ADCM-5996 --- .../DynamicActionHostMapping.utils.ts | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/adcm-web/app/src/components/common/DynamicActionDialog/DynamicActionSteps/DynamicActionHostMapping/DynamicActionHostMapping.utils.ts b/adcm-web/app/src/components/common/DynamicActionDialog/DynamicActionSteps/DynamicActionHostMapping/DynamicActionHostMapping.utils.ts index 8821d3a626..69756904ed 100644 --- a/adcm-web/app/src/components/common/DynamicActionDialog/DynamicActionSteps/DynamicActionHostMapping/DynamicActionHostMapping.utils.ts +++ b/adcm-web/app/src/components/common/DynamicActionDialog/DynamicActionSteps/DynamicActionHostMapping/DynamicActionHostMapping.utils.ts @@ -6,6 +6,7 @@ import { type AdcmMappingComponent, type AdcmMappingComponentService, type HostId, + AdcmMaintenanceMode, } from '@models/adcm'; import type { ComponentAvailabilityErrors, @@ -66,5 +67,8 @@ export const checkHostActionsMappingAvailability = ( disabledHosts: Set = new Set(), ): string | undefined => { const isDisabled = !allowActions.has(AdcmHostComponentMapRuleAction.Remove) && disabledHosts.has(host.id); - return isDisabled ? 
'Removing host is not allowed in the action configuration' : undefined; + const isHostInMaintenanceMode = host.maintenanceMode === AdcmMaintenanceMode.On; + + if (isDisabled) return 'Removing host is not allowed in the action configuration'; + if (isHostInMaintenanceMode) return 'The host is in the maintenance mode'; }; From 03dabc652c30b0c52b6b2ac3160adc6eccc96fd8 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Thu, 10 Oct 2024 10:56:37 +0000 Subject: [PATCH 91/98] ADCM-6014: Add email for system and status users --- python/init_db.py | 45 ++++++++++++++++++++++++++++++++++----------- 1 file changed, 34 insertions(+), 11 deletions(-) diff --git a/python/init_db.py b/python/init_db.py index 6764cd4c05..e409656e69 100755 --- a/python/init_db.py +++ b/python/init_db.py @@ -60,16 +60,44 @@ def prepare_secrets_json(status_user_username: str, status_user_password: str | logger.info("Secret file %s is not updated", settings.SECRETS_FILE) -def create_status_user() -> tuple[str, str | None]: +def _create_admin_user() -> None: + username = "admin" + email = f"{username}@example.com" + + if not User.objects.filter(username=username).exists(): + User.objects.create_superuser(username=username, email=email, password=username, built_in=False) + + +def _create_status_user() -> tuple[str, str | None]: username = "status" - if User.objects.filter(username=username).exists(): + email = f"{username}@example.com" + + status_user = User.objects.filter(username=username).only("email").first() + if status_user is not None: + if status_user.email != email: + status_user.email = email + status_user.save(update_fields=["email"]) + return username, None password = token_hex(TOKEN_LENGTH) - User.objects.create_superuser(username, "", password, built_in=True) + User.objects.create_superuser(username=username, email=email, password=password, built_in=True) + return username, password +def _create_system_user() -> None: + username = "system" + email = f"{username}@example.com" + + 
system_user = User.objects.filter(username=username).only("email").first() + if system_user is None: + User.objects.create_superuser(username=username, email=email, password=None, built_in=True) + elif system_user.email != email: + system_user.email = email + system_user.save(update_fields=["email"]) + + def clear_temp_tables(): CheckLog.objects.all().delete() GroupCheckLog.objects.all().delete() @@ -105,15 +133,10 @@ def abort_all(): def init(adcm_conf_file: Path = Path(settings.BASE_DIR, "conf", "adcm", "config.yaml")): logger.info("Start initializing ADCM DB...") - if not User.objects.filter(username="admin").exists(): - User.objects.create_superuser("admin", "admin@example.com", "admin", built_in=False) - - status_user_username, status_user_password = create_status_user() + _create_admin_user() + status_user_username, status_user_password = _create_status_user() prepare_secrets_json(status_user_username, status_user_password) - - if not User.objects.filter(username="system").exists(): - User.objects.create_superuser("system", "", None, built_in=True) - logger.info("Create system user") + _create_system_user() abort_all() clear_temp_tables() From bf44d83205c4c950475a98c1ce64776cc229885e Mon Sep 17 00:00:00 2001 From: Kirill Fedorenko Date: Fri, 11 Oct 2024 08:02:41 +0000 Subject: [PATCH 92/98] ADCM-5989 [UI] Block the upgrade button if there are concerns https://tracker.yandex.ru/ADCM-5989 --- .../pages/ClustersPage/ClustersTable/ClustersTable.tsx | 3 ++- .../HostProvidersTable/HostProvidersTable.tsx | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/adcm-web/app/src/components/pages/ClustersPage/ClustersTable/ClustersTable.tsx b/adcm-web/app/src/components/pages/ClustersPage/ClustersTable/ClustersTable.tsx index 3c83270acc..4138114ed3 100644 --- a/adcm-web/app/src/components/pages/ClustersPage/ClustersTable/ClustersTable.tsx +++ b/adcm-web/app/src/components/pages/ClustersPage/ClustersTable/ClustersTable.tsx @@ -12,6 +12,7 @@ import 
ClusterDynamicActionsIcon from '@pages/ClustersPage/ClustersTable/Cluster import MultiStateCell from '@commonComponents/Table/Cells/MultiStateCell'; import { openClusterUpgradeDialog } from '@store/adcm/clusters/clusterUpgradesSlice'; import { isShowSpinner } from '@uikit/Table/Table.utils'; +import { isBlockingConcernPresent } from '@utils/concernUtils.ts'; const ClustersTable = () => { const dispatch = useDispatch(); @@ -76,7 +77,7 @@ const ClustersTable = () => { handleUpgradeClick(cluster)} title={cluster.isUpgradable ? 'Upgrade' : 'No upgrades'} /> diff --git a/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.tsx b/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.tsx index 2e3ecf52f1..352ab06dd8 100644 --- a/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.tsx +++ b/adcm-web/app/src/components/pages/HostProvidersPage/HostProvidersTable/HostProvidersTable.tsx @@ -12,6 +12,7 @@ import HostProvidersDynamicActionsIcon from '../HostProvidersDynamicActionsIcon/ import { AdcmHostProvider } from '@models/adcm'; import { opeHostProviderUpgradeDialog } from '@store/adcm/hostProviders/hostProviderUpgradesSlice'; import { isShowSpinner } from '@uikit/Table/Table.utils'; +import { isBlockingConcernPresent } from '@utils/concernUtils.ts'; const HostProviderTable = () => { const dispatch = useDispatch(); @@ -59,7 +60,7 @@ const HostProviderTable = () => { handleUpgradeClick(hostProvider)} title={hostProvider.isUpgradable ? 
'Upgrade' : 'No upgrades'} /> From 7e6db25a61e00ab37c9930b594dad27cfc839029 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 14 Oct 2024 12:38:46 +0000 Subject: [PATCH 93/98] ADCM-6012 Process task config before jobs preparation --- python/adcm/tests/base.py | 4 +- python/api_v2/tests/test_tasks.py | 10 +- python/cm/services/job/action.py | 106 ++++++++++++++---- python/cm/services/job/jinja_scripts.py | 4 +- python/cm/services/job/prepare.py | 37 ------ python/cm/tests/bundles/cluster_1/config.yaml | 11 ++ .../cm/tests/bundles/cluster_1/with_group.j2 | 12 ++ .../bundles/cluster_full_config/config.yaml | 8 +- .../action_configs/cluster.json.j2 | 6 +- .../action_configs/cluster_on_host.json.j2 | 9 +- .../action_configs/component.json.j2 | 7 +- .../action_configs/component_on_host.json.j2 | 6 +- .../action_configs/host.json.j2 | 7 +- .../action_configs/hostprovider.json.j2 | 11 +- .../action_configs/service.json.j2 | 9 +- .../action_configs/service_on_host.json.j2 | 7 +- .../test_inventory/test_action_config.py | 52 +++++---- .../test_inventory/test_cluster_hosts.py | 8 +- python/cm/tests/test_jinja_scripts.py | 21 +++- python/cm/tests/test_task_log.py | 4 +- python/core/job/task.py | 69 ------------ 21 files changed, 213 insertions(+), 195 deletions(-) delete mode 100644 python/cm/services/job/prepare.py create mode 100644 python/cm/tests/bundles/cluster_1/with_group.j2 delete mode 100644 python/core/job/task.py diff --git a/python/adcm/tests/base.py b/python/adcm/tests/base.py index b662705132..770a7209ed 100644 --- a/python/adcm/tests/base.py +++ b/python/adcm/tests/base.py @@ -44,7 +44,7 @@ Prototype, ServiceComponent, ) -from cm.services.job.prepare import prepare_task_for_action +from cm.services.job.action import prepare_task_for_action from cm.services.mapping import change_host_component_mapping from cm.utils import deep_merge from core.cluster.types import HostComponentEntry @@ -585,5 +585,5 @@ def prepare_task( action = 
Action.objects.get(prototype_id=owner.prototype_id, **action_search_kwargs) target = owner_descriptor if not host else CoreObjectDescriptor(id=host.id, type=ADCMCoreType.HOST) return prepare_task_for_action( - target=target, owner=owner_descriptor, action=action.id, payload=payload or TaskPayloadDTO() + target=target, orm_owner=owner, action=action.id, payload=payload or TaskPayloadDTO() ) diff --git a/python/api_v2/tests/test_tasks.py b/python/api_v2/tests/test_tasks.py index 5915f415c1..c27c44194d 100644 --- a/python/api_v2/tests/test_tasks.py +++ b/python/api_v2/tests/test_tasks.py @@ -27,7 +27,7 @@ ServiceComponent, TaskLog, ) -from cm.services.job.prepare import prepare_task_for_action +from cm.services.job.action import prepare_task_for_action from cm.tests.mocks.task_runner import RunTaskMock from core.job.dto import TaskPayloadDTO from core.types import ADCMCoreType, CoreObjectDescriptor @@ -55,7 +55,7 @@ def setUp(self) -> None: self.cluster_task = TaskLog.objects.get( id=prepare_task_for_action( target=cluster_object, - owner=cluster_object, + orm_owner=self.cluster_1, action=self.cluster_action.pk, payload=TaskPayloadDTO(), ).id @@ -64,7 +64,7 @@ def setUp(self) -> None: self.service_task = TaskLog.objects.get( id=prepare_task_for_action( target=service_object, - owner=service_object, + orm_owner=self.service_1, action=self.service_1_action.pk, payload=TaskPayloadDTO(), ).id @@ -73,7 +73,7 @@ def setUp(self) -> None: self.component_task = TaskLog.objects.get( id=prepare_task_for_action( target=component_object, - owner=component_object, + orm_owner=component_1, action=component_1_action.pk, payload=TaskPayloadDTO(), ).id @@ -320,6 +320,6 @@ def create_task( ) target = CoreObjectDescriptor(id=host.pk, type=ADCMCoreType.HOST) if host else owner - launch = prepare_task_for_action(target=target, owner=owner, action=action.pk, payload=TaskPayloadDTO()) + launch = prepare_task_for_action(target=target, orm_owner=object_, action=action.pk, 
payload=TaskPayloadDTO()) return TaskLog.objects.get(id=launch.id) diff --git a/python/cm/services/job/action.py b/python/cm/services/job/action.py index 506af79a21..c2680abfd5 100644 --- a/python/cm/services/job/action.py +++ b/python/cm/services/job/action.py @@ -15,8 +15,10 @@ from core.cluster.operations import create_topology_with_new_mapping, find_hosts_difference from core.cluster.types import ClusterTopology, HostComponentEntry -from core.job.dto import TaskPayloadDTO -from core.types import ActionTargetDescriptor, BundleID, CoreObjectDescriptor, HostID +from core.job.dto import LogCreateDTO, TaskPayloadDTO +from core.job.errors import TaskCreateError +from core.job.types import Task +from core.types import ActionID, ActionTargetDescriptor, BundleID, CoreObjectDescriptor, GeneralEntityDescriptor, HostID from django.conf import settings from django.db.transaction import atomic from rbac.roles import re_apply_policy_for_jobs @@ -47,8 +49,9 @@ from cm.services.job._utils import check_delta_is_allowed, construct_delta_for_task from cm.services.job.constants import HC_CONSTRAINT_VIOLATION_ON_UPGRADE_TEMPLATE from cm.services.job.inventory._config import update_configuration_for_inventory_inplace -from cm.services.job.prepare import prepare_task_for_action +from cm.services.job.jinja_scripts import get_job_specs_from_template from cm.services.job.run import run_task +from cm.services.job.run.repo import ActionRepoImpl, JobRepoImpl from cm.services.job.types import ActionHCRule, TaskMappingDelta from cm.services.mapping import change_host_component_mapping, check_no_host_in_mm, check_nothing from cm.status_api import send_task_status_update_event @@ -58,7 +61,7 @@ ActionTarget: TypeAlias = ObjectWithAction | ActionHostGroup -@dataclass +@dataclass(slots=True) class ActionRunPayload: conf: dict = field(default_factory=dict) attr: dict = field(default_factory=dict) @@ -89,10 +92,6 @@ def run_action( 
_check_no_blocking_concerns(lock_owner=action_objects.object_to_lock, action_name=action.name) _check_action_is_available_for_object(owner=action_objects.owner, action=action) - spec, flat_spec = _process_run_config( - action=action, owner=action_objects.owner, conf=payload.conf, attr=payload.attr - ) - delta = TaskMappingDelta() if action_objects.cluster and (action_has_hc_acl or is_upgrade_action): topology = retrieve_cluster_topology(cluster_id=action_objects.cluster.id) @@ -113,11 +112,12 @@ def run_action( } with atomic(): - owner = CoreObjectDescriptor(id=action_objects.owner.id, type=orm_object_to_core_type(action_objects.owner)) target = ActionTargetDescriptor( id=action_objects.target.id, type=orm_object_to_action_target_type(action_objects.target) ) - task = prepare_task_for_action(target=target, owner=owner, action=action.id, payload=task_payload, delta=delta) + task = prepare_task_for_action( + target=target, orm_owner=action_objects.owner, action=action.id, payload=task_payload, delta=delta + ) orm_task = TaskLog.objects.get(id=task.id) @@ -131,17 +131,6 @@ def run_action( checks_func=check_nothing, ) - if payload.conf: - new_conf = update_configuration_for_inventory_inplace( - configuration=payload.conf, - attributes=payload.attr, - specification=convert_to_flat_spec_from_proto_flat_spec(prototypes_flat_spec=flat_spec), - config_owner=owner, - ) - process_file_type(obj=orm_task, spec=spec, conf=payload.conf) - orm_task.config = new_conf - orm_task.save(update_fields=["config"]) - re_apply_policy_for_jobs(action_object=action_objects.owner, task=orm_task) run_task(orm_task) @@ -151,6 +140,81 @@ def run_action( return orm_task +def prepare_task_for_action( + target: ActionTargetDescriptor, + orm_owner: ObjectWithAction, + action: ActionID, + payload: TaskPayloadDTO, + delta: TaskMappingDelta | None = None, +) -> Task: + """ + Prepare task based on action, target object and task payload. 
+ + Target object is an object on which action is going to be launched, not the on it's described on. + + `Task` is launched action, "task for ADCM to perform action" in other words. + `Job` is an actual piece of work required by task to be performed. + + ! WARNING ! + Currently, stdout/stderr logs are created alongside the jobs + for policies to be re-applied correctly after this method is called. + + It may be changed if favor of creating logs when job is actually prepared/started. + + ! ADCM-6012 ! + Code moved from `core.job.task` here, because it's unclear for now + how required level of unity can be implemented with enough isolation and readability. + """ + job_repo = JobRepoImpl + action_repo = ActionRepoImpl + owner = CoreObjectDescriptor(id=orm_owner.id, type=orm_object_to_core_type(orm_owner)) + + spec, flat_spec = _process_run_config( + action=Action.objects.select_related("prototype").get(id=action), + owner=orm_owner, + conf=payload.conf, + attr=payload.attr, + ) + + action_info = action_repo.get_action(id=action) + task = job_repo.create_task(target=target, owner=owner, action=action_info, payload=payload) + + if payload.conf: + orm_task = TaskLog.objects.get(id=task.id) + orm_task.config = update_configuration_for_inventory_inplace( + configuration=payload.conf, + attributes=payload.attr, + specification=convert_to_flat_spec_from_proto_flat_spec(prototypes_flat_spec=flat_spec), + config_owner=GeneralEntityDescriptor(id=task.id, type="task"), + ) + process_file_type(obj=orm_task, spec=spec, conf=payload.conf) + orm_task.save(update_fields=["config"]) + # reread to update config + # ! 
this should be reworked when "layering" will be performed + task = job_repo.get_task(id=task.id) + + if action_info.scripts_jinja: + job_specifications = tuple(get_job_specs_from_template(task_id=task.id, delta=delta)) + else: + job_specifications = tuple(action_repo.get_job_specs(id=action)) + + if not job_specifications: + message = f"Can't compose task for action #{action}, because no associated jobs found" + raise TaskCreateError(message) + + job_repo.create_jobs(task_id=task.id, jobs=job_specifications) + + logs = [] + for job in job_repo.get_task_jobs(task_id=task.id): + logs.append(LogCreateDTO(job_id=job.id, name=job.type.value, type="stdout", format="txt")) + logs.append(LogCreateDTO(job_id=job.id, name=job.type.value, type="stderr", format="txt")) + + if logs: + job_repo.create_logs(logs) + + return task + + class _ActionLaunchObjects: """ Utility container to process differences in action's target/owner in one place diff --git a/python/cm/services/job/jinja_scripts.py b/python/cm/services/job/jinja_scripts.py index 69c37a6059..90249de440 100755 --- a/python/cm/services/job/jinja_scripts.py +++ b/python/cm/services/job/jinja_scripts.py @@ -90,9 +90,9 @@ def get_env(task: TaskLog, delta: TaskMappingDelta | None = None) -> JinjaScript } return JinjaScriptsEnvironment( - cluster=cluster_vars.cluster.dict(by_alias=True), + cluster=cluster_vars.cluster.model_dump(by_alias=True), services={ - service_name: service_data.dict(by_alias=True) + service_name: service_data.model_dump(by_alias=True) for service_name, service_data in cluster_vars.services.items() }, groups=host_groups, diff --git a/python/cm/services/job/prepare.py b/python/cm/services/job/prepare.py deleted file mode 100644 index efe8e0eeb5..0000000000 --- a/python/cm/services/job/prepare.py +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from core.job.dto import TaskPayloadDTO -from core.job.task import compose_task -from core.job.types import Task -from core.types import ActionID, ActionTargetDescriptor, CoreObjectDescriptor - -from cm.services.job.run.repo import ActionRepoImpl, JobRepoImpl -from cm.services.job.types import TaskMappingDelta - - -def prepare_task_for_action( - target: ActionTargetDescriptor, - owner: CoreObjectDescriptor, - action: ActionID, - payload: TaskPayloadDTO, - delta: TaskMappingDelta | None = None, -) -> Task: - return compose_task( - target=target, - owner=owner, - action=action, - payload=payload, - job_repo=JobRepoImpl, - action_repo=ActionRepoImpl, - delta=delta, - ) diff --git a/python/cm/tests/bundles/cluster_1/config.yaml b/python/cm/tests/bundles/cluster_1/config.yaml index 0dc6c69f53..98e6ef98f2 100644 --- a/python/cm/tests/bundles/cluster_1/config.yaml +++ b/python/cm/tests/bundles/cluster_1/config.yaml @@ -66,6 +66,17 @@ scripts_jinja: "./unprocessable_scripts.j2" states: available: any + with_activatable_group_jinja: + type: task + scripts_jinja: "./with_group.j2" + masking: {} + config: + - name: group + activatable: true + type: group + subs: + - name: x + type: integer - name: service_one_component type: service diff --git a/python/cm/tests/bundles/cluster_1/with_group.j2 b/python/cm/tests/bundles/cluster_1/with_group.j2 new file mode 100644 index 0000000000..6127ae5ea5 --- /dev/null +++ b/python/cm/tests/bundles/cluster_1/with_group.j2 @@ -0,0 +1,12 @@ +- name: default + script_type: ansible + script: nono.yaml +{% if not task.config.group %} 
+- name: inactive + script_type: ansible + script: haha.yaml +{% else %} +- name: active + script_type: ansible + script: haha.yaml +{% endif %} diff --git a/python/cm/tests/bundles/cluster_full_config/config.yaml b/python/cm/tests/bundles/cluster_full_config/config.yaml index 51a3d3a1ae..234b6a5a3b 100644 --- a/python/cm/tests/bundles/cluster_full_config/config.yaml +++ b/python/cm/tests/bundles/cluster_full_config/config.yaml @@ -158,7 +158,13 @@ components: all_params_component: - actions: *actions + actions: + <<: *actions + without_config_on_host: + <<: *job + allow_for_action_host_group: false + host_action: true + config: *config - type: service diff --git a/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 b/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 index 4bea701268..25af488dd7 100644 --- a/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/cluster.json.j2 @@ -45,9 +45,9 @@ }, "job": { "id": {{ job_id }}, - "action": "with_config", - "job_name": "with_config", - "command": "with_config", + "action": "dummy", + "job_name": "dummy", + "command": "dummy", "script": "playbook.yaml", "verbose": false, "playbook": "{{ stackdir }}/data/bundle/{{ cluster_bundle.hash }}/playbook.yaml", diff --git a/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 b/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 index 9000362318..a52fc4bdf8 100644 --- a/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/cluster_on_host.json.j2 @@ -64,7 +64,7 @@ ], "variant_inline": "f", "plain_group": { - "file": "contente\t\n\n\n\tbest\n\t ", + "file": "{{ filedir }}/task.{{ task_id }}.plain_group.file", "map": { "k": "v", "key": "val" @@ -84,8 +84,8 @@ "me": "you" }, "json": "{\"hey\": [\"yooo\", 1]}", - "file": 
"filecontent", - "secretfile": "somesecrethere", + "file": "{{ filedir }}/task.{{ task_id }}.file.", + "secretfile": "{{ filedir }}/task.{{ task_id }}.secretfile.", "variant_builtin": "host-1", "activatable_group": { "simple": "inactive", @@ -93,7 +93,8 @@ "one", "two" ] - } + }, + "source_list": ["ok", "fail"] } } } \ No newline at end of file diff --git a/python/cm/tests/files/response_templates/action_configs/component.json.j2 b/python/cm/tests/files/response_templates/action_configs/component.json.j2 index 476d2651d7..ecc40d6b21 100644 --- a/python/cm/tests/files/response_templates/action_configs/component.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/component.json.j2 @@ -68,7 +68,7 @@ ], "variant_inline": "f", "plain_group": { - "file": "contente\t\n\n\n\tbest\n\t ", + "file": "{{ filedir }}/task.{{ task_id }}.plain_group.file", "map": { "k": "v", "key": "val" @@ -79,13 +79,14 @@ "float": null, "string": null, "password": null, - "map": null, + "map": {}, "secretmap": null, "json": null, "file": null, "secretfile": null, "variant_builtin": null, - "activatable_group": null + "activatable_group": null, + "source_list": ["ok", "fail"] } } } \ No newline at end of file diff --git a/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 b/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 index c150ad94ca..bc4a0edf7a 100644 --- a/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/component_on_host.json.j2 @@ -48,9 +48,9 @@ }, "job": { "id": {{ job_id }}, - "action": "with_config_on_host", - "job_name": "with_config_on_host", - "command": "with_config_on_host", + "action": "without_config_on_host", + "job_name": "without_config_on_host", + "command": "without_config_on_host", "script": "playbook.yaml", "verbose": true, "playbook": "{{ stackdir }}/data/bundle/{{ cluster_bundle.hash 
}}/playbook.yaml", diff --git a/python/cm/tests/files/response_templates/action_configs/host.json.j2 b/python/cm/tests/files/response_templates/action_configs/host.json.j2 index 930c0ef2f8..559f205770 100644 --- a/python/cm/tests/files/response_templates/action_configs/host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/host.json.j2 @@ -68,7 +68,7 @@ ], "variant_inline": "f", "plain_group": { - "file": "contente\t\n\n\n\tbest\n\t ", + "file": "{{ filedir }}/task.{{ task_id }}.plain_group.file", "map": { "k": "v", "key": "val" @@ -79,13 +79,14 @@ "float": null, "string": null, "password": null, - "map": null, + "map": {}, "secretmap": null, "json": null, "file": null, "secretfile": null, "variant_builtin": null, - "activatable_group": null + "activatable_group": null, + "source_list": ["ok", "fail"] } } } \ No newline at end of file diff --git a/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 b/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 index b38bc6b9f8..17a00e6d0f 100644 --- a/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/hostprovider.json.j2 @@ -63,7 +63,7 @@ ], "variant_inline": "f", "plain_group": { - "file": "contente\t\n\n\n\tbest\n\t ", + "file": "{{ filedir }}/task.{{ task_id }}.plain_group.file", "map": { "k": "v", "key": "val" @@ -83,16 +83,17 @@ "me": "you" }, "json": "{\"hey\": [\"yooo\", 1]}", - "file": "filecontent", - "secretfile": "somesecrethere", - "variant_builtin": "host-1", + "file": "{{ filedir }}/task.{{ task_id }}.file.", + "secretfile": "{{ filedir }}/task.{{ task_id }}.secretfile.", + "variant_builtin": "host-3", "activatable_group": { "simple": "inactive", "list": [ "one", "two" ] - } + }, + "source_list": ["ok", "fail"] } } } \ No newline at end of file diff --git a/python/cm/tests/files/response_templates/action_configs/service.json.j2 
b/python/cm/tests/files/response_templates/action_configs/service.json.j2 index be5e97abcd..997be53b75 100644 --- a/python/cm/tests/files/response_templates/action_configs/service.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/service.json.j2 @@ -66,7 +66,7 @@ ], "variant_inline": "f", "plain_group": { - "file": "contente\t\n\n\n\tbest\n\t ", + "file": "{{ filedir }}/task.{{ task_id }}.plain_group.file", "map": { "k": "v", "key": "val" @@ -86,8 +86,8 @@ "me": "you" }, "json": "{\"hey\": [\"yooo\", 1]}", - "file": "filecontent", - "secretfile": "somesecrethere", + "file": "{{ filedir }}/task.{{ task_id }}.file.", + "secretfile": "{{ filedir }}/task.{{ task_id }}.secretfile.", "variant_builtin": "host-1", "activatable_group": { "simple": "inactive", @@ -95,7 +95,8 @@ "one", "two" ] - } + }, + "source_list": ["ok", "fail"] } } } \ No newline at end of file diff --git a/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 b/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 index 5bba2eb8c2..41f9ab404f 100644 --- a/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 +++ b/python/cm/tests/files/response_templates/action_configs/service_on_host.json.j2 @@ -67,7 +67,7 @@ ], "variant_inline": "f", "plain_group": { - "file": "contente\t\n\n\n\tbest\n\t ", + "file": "{{ filedir }}/task.{{ task_id }}.plain_group.file", "map": { "k": "v", "key": "val" @@ -78,13 +78,14 @@ "float": null, "string": null, "password": null, - "map": null, + "map": {}, "secretmap": null, "json": null, "file": null, "secretfile": null, "variant_builtin": null, - "activatable_group": null + "activatable_group": null, + "source_list": ["ok", "fail"] } } } \ No newline at end of file diff --git a/python/cm/tests/test_inventory/test_action_config.py b/python/cm/tests/test_inventory/test_action_config.py index c0370432f7..012a285150 100644 --- a/python/cm/tests/test_inventory/test_action_config.py 
+++ b/python/cm/tests/test_inventory/test_action_config.py @@ -20,12 +20,11 @@ from cm.adcm_config.ansible import ansible_decrypt from cm.converters import model_name_to_core_type from cm.models import Action, ServiceComponent -from cm.services.job.action import ActionRunPayload, run_action -from cm.services.job.prepare import prepare_task_for_action +from cm.services.job.action import ActionRunPayload, prepare_task_for_action, run_action from cm.services.job.run._target_factories import prepare_ansible_job_config from cm.services.job.run.repo import JobRepoImpl from cm.tests.mocks.task_runner import RunTaskMock -from cm.tests.test_inventory.base import BaseInventoryTestCase +from cm.tests.test_inventory.base import BaseInventoryTestCase, decrypt_secrets class TestConfigAndImportsInInventory(BaseInventoryTestCase): @@ -45,7 +44,8 @@ class TestConfigAndImportsInInventory(BaseInventoryTestCase): "file": None, "secretfile": None, "variant_builtin": None, - "activatable_group": None, + "activatable_group": {"simple": "inactive", "list": ["one", "two"]}, + "source_list": ["ok", "fail"], } FULL_CONFIG = { @@ -61,7 +61,6 @@ class TestConfigAndImportsInInventory(BaseInventoryTestCase): "secretfile": "somesecrethere", "variant_builtin": "host-1", "plain_group": {**CONFIG_WITH_NONES["plain_group"], "simple": "ingroup"}, - "activatable_group": {"simple": "inactive", "list": ["one", "two"]}, } def setUp(self) -> None: @@ -76,6 +75,7 @@ def setUp(self) -> None: self.host_2 = self.add_host( bundle=self.hostprovider.prototype.bundle, provider=self.hostprovider, fqdn="host-2" ) + self.host_3 = self.add_host(provider=self.hostprovider, fqdn="host-3") self.cluster = self.add_cluster( bundle=self.add_bundle(self.bundles_dir / "cluster_full_config"), name="Main Cluster" @@ -94,6 +94,7 @@ def setUp(self) -> None: "cluster_bundle": self.cluster.prototype.bundle, "datadir": self.directories["DATA_DIR"], "stackdir": self.directories["STACK_DIR"], + "filedir": self.directories["FILE_DIR"], 
"token": settings.STATUS_SECRET_KEY, "component_type_id": self.component.prototype_id, } @@ -105,10 +106,6 @@ def setUp(self) -> None: ) def test_action_config(self) -> None: - # Thou action has a defined config - # `prepare_job_config` itself doesn't check input config sanity, - # but `None` is a valid config, - # so I find it easier to check it in pairs here rather than use a separate action for object_, config, type_name in ( (self.cluster, None, "cluster"), (self.service, self.FULL_CONFIG, "service"), @@ -116,53 +113,64 @@ def test_action_config(self) -> None: (self.hostprovider, self.FULL_CONFIG, "hostprovider"), (self.host_1, self.CONFIG_WITH_NONES, "host"), ): - action = Action.objects.filter(prototype=object_.prototype, name="with_config").first() + # prepare_task_for_action is now checking sanity of config, so we have to pass the correct one + action_name = "with_config" if type_name != "cluster" else "dummy" + active = type_name in ("service", "hostprovider") + config_diff = {} if type_name != "hostprovider" else {"variant_builtin": "host-3"} + + action = Action.objects.filter(prototype=object_.prototype, name=action_name).first() obj_ = CoreObjectDescriptor( id=object_.pk, type=model_name_to_core_type(model_name=object_.__class__.__name__.lower()) ) task = prepare_task_for_action( target=obj_, - owner=obj_, + orm_owner=object_, action=action.pk, - payload=TaskPayloadDTO(conf=config), + payload=TaskPayloadDTO( + conf=(deepcopy(config) or {}) | config_diff, attr={"activatable_group": {"active": active}} + ), ) job, *_ = JobRepoImpl.get_task_jobs(task.id) with self.subTest(f"Own Action for {object_.__class__.__name__}"): expected_data = self.render_json_template( file=self.templates_dir / "action_configs" / f"{type_name}.json.j2", - context={**self.context, "job_id": job.id}, + context={**self.context, "job_id": job.id, "task_id": task.id}, ) job_config = prepare_ansible_job_config(task=task, job=job, configuration=self.configuration) - 
self.assertDictEqual(job_config, expected_data) + self.assertDictEqual(decrypt_secrets(job_config), expected_data) for object_, config, type_name in ( (self.cluster, self.FULL_CONFIG, "cluster"), (self.service, self.CONFIG_WITH_NONES, "service"), (self.component, None, "component"), ): - action = Action.objects.filter(prototype=object_.prototype, name="with_config_on_host").first() + # prepare_task_for_action is now checking sanity of config, so we have to pass the correct one + action_name = "with_config_on_host" if type_name != "component" else "without_config_on_host" + active = type_name == "cluster" + + action = Action.objects.filter(prototype=object_.prototype, name=action_name).first() target = CoreObjectDescriptor(id=self.host_1.pk, type=ADCMCoreType.HOST) task = prepare_task_for_action( target=target, - owner=CoreObjectDescriptor( - id=object_.pk, type=model_name_to_core_type(object_.__class__.__name__.lower()) - ), + orm_owner=object_, action=action.pk, - payload=TaskPayloadDTO(verbose=True, conf=config), + payload=TaskPayloadDTO( + verbose=True, conf=deepcopy(config), attr={"activatable_group": {"active": active}} + ), ) job, *_ = JobRepoImpl.get_task_jobs(task.id) with self.subTest(f"Host Action for {object_.__class__.__name__}"): expected_data = self.render_json_template( file=self.templates_dir / "action_configs" / f"{type_name}_on_host.json.j2", - context={**self.context, "job_id": job.id}, + context={**self.context, "job_id": job.id, "task_id": task.id}, ) job_config = prepare_ansible_job_config(task=task, job=job, configuration=self.configuration) - self.assertDictEqual(job_config, expected_data) + self.assertDictEqual(decrypt_secrets(job_config), expected_data) def test_adcm_5305_action_config_with_secrets_bug(self): """ @@ -279,7 +287,7 @@ def test_scripts_in_action_config(self) -> None: ) task = prepare_task_for_action( target=target, - owner=target, + orm_owner=object_, action=action.pk, payload=TaskPayloadDTO(), ) diff --git 
a/python/cm/tests/test_inventory/test_cluster_hosts.py b/python/cm/tests/test_inventory/test_cluster_hosts.py index 1a5916e8b3..fbc0c46688 100644 --- a/python/cm/tests/test_inventory/test_cluster_hosts.py +++ b/python/cm/tests/test_inventory/test_cluster_hosts.py @@ -18,8 +18,8 @@ from django.core.exceptions import ObjectDoesNotExist from cm.models import Action +from cm.services.job.action import prepare_task_for_action from cm.services.job.inventory import get_inventory_data -from cm.services.job.prepare import prepare_task_for_action from cm.tests.test_inventory.base import BaseInventoryTestCase @@ -202,10 +202,8 @@ def test_adcm_5747_delete_service(self) -> None: self.set_hostcomponent(cluster=self.cluster_1, entries=[(host, service.servicecomponent_set.first())]) action = Action.objects.get(prototype=service.prototype, name="action_on_service") - target = owner_descriptor = CoreObjectDescriptor(id=service.id, type=ADCMCoreType.SERVICE) - task = prepare_task_for_action( - target=target, owner=owner_descriptor, action=action.id, payload=TaskPayloadDTO() - ) + target = CoreObjectDescriptor(id=service.id, type=ADCMCoreType.SERVICE) + task = prepare_task_for_action(target=target, orm_owner=service, action=action.id, payload=TaskPayloadDTO()) # imitate service deletion during task run (prev job deleted service) service.delete() diff --git a/python/cm/tests/test_jinja_scripts.py b/python/cm/tests/test_jinja_scripts.py index 17e82beb81..2cb6935470 100644 --- a/python/cm/tests/test_jinja_scripts.py +++ b/python/cm/tests/test_jinja_scripts.py @@ -13,10 +13,12 @@ from pathlib import Path from adcm.tests.base import BaseTestCase, BusinessLogicMixin, TaskTestMixin +from api.tests.test_job import RunTaskMock from rest_framework.status import HTTP_422_UNPROCESSABLE_ENTITY from cm.errors import AdcmEx -from cm.models import ConfigLog, JobLog, MaintenanceMode, ServiceComponent, TaskLog +from cm.models import Action, ConcernItem, ConfigLog, JobLog, MaintenanceMode, 
ServiceComponent, TaskLog +from cm.services.job.action import ActionRunPayload, run_action from cm.services.job.jinja_scripts import get_env from cm.tests.test_inventory.base import ansible_decrypt, decrypt_secrets @@ -168,3 +170,20 @@ def test_unprocessable_template(self): self.assertEqual(err.exception.msg, "Can't render jinja template") self.assertEqual(err.exception.status_code, HTTP_422_UNPROCESSABLE_ENTITY) self.assertEqual(JobLog.objects.count(), initial_jobs_count) + + def test_adcm_6012_task_config_processing(self) -> None: + action = Action.objects.get(prototype_id=self.cluster.prototype_id, name="with_activatable_group_jinja") + + for active, expected_jobs in ((False, ["default", "inactive"]), (True, ["default", "active"])): + with RunTaskMock(): + task = run_action( + action=action, + obj=self.cluster, + payload=ActionRunPayload(conf={"group": {"x": 2}}, attr={"group": {"active": active}}), + ) + + self.assertListEqual(list(JobLog.objects.filter(task=task).values_list("name", flat=True)), expected_jobs) + + task.status = "succeed" + task.save(update_fields=["status"]) + ConcernItem.objects.all().delete() diff --git a/python/cm/tests/test_task_log.py b/python/cm/tests/test_task_log.py index 4e4ca10a97..80bb7abcb6 100644 --- a/python/cm/tests/test_task_log.py +++ b/python/cm/tests/test_task_log.py @@ -31,7 +31,7 @@ SubAction, TaskLog, ) -from cm.services.job.prepare import prepare_task_for_action +from cm.services.job.action import prepare_task_for_action from cm.tests.utils import ( gen_adcm, gen_cluster, @@ -108,7 +108,7 @@ def test_download_negative(self): task = TaskLog.objects.get( id=prepare_task_for_action( target=object_, - owner=object_, + orm_owner=cluster, action=action.pk, payload=TaskPayloadDTO(), ).id diff --git a/python/core/job/task.py b/python/core/job/task.py deleted file mode 100644 index d2eb934337..0000000000 --- a/python/core/job/task.py +++ /dev/null @@ -1,69 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); 
-# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from cm.services.job.jinja_scripts import get_job_specs_from_template -from cm.services.job.types import TaskMappingDelta - -from core.job.dto import LogCreateDTO, TaskPayloadDTO -from core.job.errors import TaskCreateError -from core.job.repo import ActionRepoInterface, JobRepoInterface -from core.types import ActionID, ActionTargetDescriptor, CoreObjectDescriptor - - -def compose_task( - target: ActionTargetDescriptor, - owner: CoreObjectDescriptor, - action: ActionID, - payload: TaskPayloadDTO, - job_repo: JobRepoInterface, - action_repo: ActionRepoInterface, - delta: TaskMappingDelta | None = None, -): - """ - Prepare task based on action, target object and task payload. - - Target object is an object on which action is going to be launched, not the on it's described on. - - `Task` is launched action, "task for ADCM to perform action" in other words. - `Job` is an actual piece of work required by task to be performed. - - ! WARNING ! - Currently, stdout/stderr logs are created alongside the jobs - for policies to be re-applied correctly after this method is called. - - It may be changed if favor of creating logs when job is actually prepared/started. 
- """ - - action_info = action_repo.get_action(id=action) - task = job_repo.create_task(target=target, owner=owner, action=action_info, payload=payload) - - if action_info.scripts_jinja: - job_specifications = tuple(get_job_specs_from_template(task_id=task.id, delta=delta)) - else: - job_specifications = tuple(action_repo.get_job_specs(id=action)) - - if not job_specifications: - message = f"Can't compose task for action #{action}, because no associated jobs found" - raise TaskCreateError(message) - - job_repo.create_jobs(task_id=task.id, jobs=job_specifications) - - logs = [] - for job in job_repo.get_task_jobs(task_id=task.id): - logs.append(LogCreateDTO(job_id=job.id, name=job.type.value, type="stdout", format="txt")) - logs.append(LogCreateDTO(job_id=job.id, name=job.type.value, type="stderr", format="txt")) - - if logs: - job_repo.create_logs(logs) - - return task From c5466334efae931bd83556e51f3133c34f20ec89 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 15 Oct 2024 05:07:07 +0000 Subject: [PATCH 94/98] ADCM-6023 & ADCM-6028 & ADCM-6027 Cluster dump/load fixes Changed: 1. Decryption is now based on `str` values instead of `__ansible_vault` key in dictionary ("inventory" format) to be universal 2. Use business function for cluster mapping on `loadcluster` Added: 1. 
Cluster's ansible config is restored during dump/load cluster process --- python/cm/services/config/secrets.py | 20 +++++++------------- python/cm/services/transition/dump.py | 5 +++++ python/cm/services/transition/load.py | 23 ++++++++++++----------- python/cm/services/transition/types.py | 1 + 4 files changed, 25 insertions(+), 24 deletions(-) diff --git a/python/cm/services/config/secrets.py b/python/cm/services/config/secrets.py index e5a1e57d5a..850c5c746e 100644 --- a/python/cm/services/config/secrets.py +++ b/python/cm/services/config/secrets.py @@ -42,20 +42,14 @@ def reveal_secrets(self, source: dict) -> dict: result = {} for key, value in source.items(): - if not isinstance(value, dict): - if isinstance(value, list): - result[key] = [ - entry if not isinstance(entry, dict) else self.reveal_secrets(entry) for entry in value - ] - else: - result[key] = value - - continue - - if "__ansible_vault" in value: - result[key] = self.decrypt(value["__ansible_vault"]) - else: + if isinstance(value, dict): result[key] = self.reveal_secrets(value) + elif isinstance(value, list): + result[key] = [entry if not isinstance(entry, dict) else self.reveal_secrets(entry) for entry in value] + elif isinstance(value, str): + result[key] = self.decrypt(value) + else: + result[key] = value return result diff --git a/python/cm/services/transition/dump.py b/python/cm/services/transition/dump.py index 01a4ef5bdf..a8af5e5abe 100644 --- a/python/cm/services/transition/dump.py +++ b/python/cm/services/transition/dump.py @@ -30,6 +30,7 @@ from django.db.models import F, Q from cm.models import ( + AnsibleConfig, Bundle, Cluster, ClusterObject, @@ -159,6 +160,10 @@ def retrieve_cluster( bundle=cluster.bundle_hash, name=cluster.name, description=cluster.description, condition=current_condition ) + cluster_info.ansible_config = AnsibleConfig.objects.values_list("value", flat=True).get( + object_id=cluster.id, object_type=ContentType.objects.get_for_model(Cluster) + ) + service_id_name_map: 
dict[ServiceID, ServiceNameKey] = {} component_id_name_map: dict[ComponentID, ComponentNameKey] = {} diff --git a/python/cm/services/transition/load.py b/python/cm/services/transition/load.py index 98e1f483d1..48d5ca0507 100644 --- a/python/cm/services/transition/load.py +++ b/python/cm/services/transition/load.py @@ -15,6 +15,7 @@ from api_v2.host.utils import create_host from api_v2.service.utils import bulk_add_services_to_cluster +from core.cluster.types import HostComponentEntry from core.types import ( BundleID, ClusterID, @@ -30,12 +31,12 @@ from cm.api import add_cluster, add_host_provider, update_obj_config from cm.models import ( + AnsibleConfig, Bundle, Cluster, ClusterObject, GroupConfig, Host, - HostComponent, HostProvider, MaintenanceMode, ObjectType, @@ -43,6 +44,7 @@ ServiceComponent, ) from cm.services.cluster import perform_host_to_cluster_map +from cm.services.mapping import change_host_component_mapping from cm.services.status import notify from cm.services.transition.types import ( BundleHash, @@ -154,6 +156,9 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa cluster_prototype = Prototype.objects.get(bundle_id=bundle_id, type=ObjectType.CLUSTER) cluster_object = add_cluster(prototype=cluster_prototype, name=cluster.name, description=cluster.description) + AnsibleConfig.objects.filter( + object_id=cluster_object.id, object_type=ContentType.objects.get_for_model(Cluster) + ).update(value=cluster.ansible_config) services_to_add = Prototype.objects.filter( bundle_id=bundle_id, type=ObjectType.SERVICE, name__in=(service.name for service in cluster.services.values()) ) @@ -202,21 +207,17 @@ def create_cluster(cluster: ClusterInfo, bundles: BundleHashIDMap, hosts: HostNa ServiceComponent.objects.filter(id__in=components_in_mm).update(_maintenance_mode=MaintenanceMode.ON) if cluster.mapping: - entries = deque() - + mapping = deque() for hc_entry in cluster.mapping: - service_object, component_object_mapping = 
orm_objects[hc_entry.service] - component_object = component_object_mapping[hc_entry.component] - entries.append( - HostComponent( - cluster_id=cluster_object.id, - service_id=service_object.id, - component_id=component_object.id, + _, component_object_mapping = orm_objects[hc_entry.service] + mapping.append( + HostComponentEntry( + component_id=component_object_mapping[hc_entry.component].id, host_id=hosts[hc_entry.host], ) ) - HostComponent.objects.bulk_create(objs=entries) + change_host_component_mapping(cluster_id=cluster_object.id, bundle_id=bundle_id, flat_mapping=mapping) if config_host_groups: for owner, group in config_host_groups: diff --git a/python/cm/services/transition/types.py b/python/cm/services/transition/types.py index b7178f718e..5315f5f075 100644 --- a/python/cm/services/transition/types.py +++ b/python/cm/services/transition/types.py @@ -98,6 +98,7 @@ class ClusterInfo: services: dict[ServiceName, ServiceInfo] = field(default_factory=dict) mapping: list[NamedMappingEntry] = field(default_factory=list) host_groups: list[ConfigHostGroupInfo] = field(default_factory=list) + ansible_config: ConfigurationDict = field(default_factory=dict) class TransitionPayload(BaseModel): From c5e532e9a6556227b55aa227406b12ecbce5443f Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Wed, 16 Oct 2024 11:13:26 +0000 Subject: [PATCH 95/98] ADCM-6013: Integer constraints for config_jinja --- .../bundles/cluster_actions_jinja/config.yaml | 11 ++ .../config_jinja_numeric_min_max.jinja2 | 17 +++ ...for_action_with_numeric_min_max_param.json | 83 +++++++++++ ...th_numeric_min_max_param_target_state.json | 103 +++++++++++++ python/api_v2/tests/test_actions.py | 29 ++++ python/cm/services/config/jinja.py | 138 +++++++++--------- 6 files changed, 316 insertions(+), 65 deletions(-) create mode 100644 python/api_v2/tests/bundles/cluster_actions_jinja/config_jinja_numeric_min_max.jinja2 create mode 100644 
python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param.json create mode 100644 python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param_target_state.json diff --git a/python/api_v2/tests/bundles/cluster_actions_jinja/config.yaml b/python/api_v2/tests/bundles/cluster_actions_jinja/config.yaml index 46fbdefb2b..ccf0ea4ce9 100644 --- a/python/api_v2/tests/bundles/cluster_actions_jinja/config.yaml +++ b/python/api_v2/tests/bundles/cluster_actions_jinja/config.yaml @@ -53,6 +53,17 @@ available: any config_jinja: ./config_jinja_multistate.jinja2 + check_numeric_min_max_param: + type: job + script: ./actions.yaml + script_type: ansible + params: + ansible_tags: sleep + states: + on_success: created + available: any + config_jinja: ./config_jinja_numeric_min_max.jinja2 + - type: service name: first_service version: *version diff --git a/python/api_v2/tests/bundles/cluster_actions_jinja/config_jinja_numeric_min_max.jinja2 b/python/api_v2/tests/bundles/cluster_actions_jinja/config_jinja_numeric_min_max.jinja2 new file mode 100644 index 0000000000..a75e865c51 --- /dev/null +++ b/python/api_v2/tests/bundles/cluster_actions_jinja/config_jinja_numeric_min_max.jinja2 @@ -0,0 +1,17 @@ +- name: group_name + type: group + activatable: true + ui_options: + advanced: True + active: true + subs: + - name: boolean_param + type: boolean + default: true +{% if cluster.state == "ready_for_numeric_min_max" %} + - name: some_numeric_param + type: integer + default: 5 + min: 0 + max: 65535 +{% endif %} diff --git a/python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param.json b/python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param.json new file mode 100644 index 0000000000..274554df62 --- /dev/null +++ b/python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param.json @@ -0,0 +1,83 @@ +{ + "name": "check_numeric_min_max_param", + 
"displayName": "check_numeric_min_max_param", + "startImpossibleReason": null, + "isAllowToTerminate": false, + "hostComponentMapRules": [], + "disclaimer": "", + "configuration": { + "configSchema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Configuration", + "description": "", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "nullValue": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "type": "object", + "properties": { + "group_name": { + "title": "group_name", + "type": "object", + "description": "", + "default": {}, + "readOnly": false, + "adcmMeta": { + "isAdvanced": true, + "isInvisible": false, + "activation": { + "isAllowChange": true + }, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "additionalProperties": false, + "properties": { + "boolean_param": { + "title": "boolean_param", + "type": "boolean", + "description": "", + "default": true, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + } + } + }, + "required": [ + "boolean_param" + ] + } + }, + "additionalProperties": false, + "required": [ + "group_name" + ] + }, + "config": { + "group_name": { + "boolean_param": true + } + }, + "adcmMeta": { + "/group_name": { + "isActive": true + } + } + } +} \ No newline at end of file diff --git a/python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param_target_state.json b/python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param_target_state.json new file mode 100644 index 0000000000..8e01f1baab --- /dev/null +++ b/python/api_v2/tests/files/responses/config_schemas/for_action_with_numeric_min_max_param_target_state.json @@ -0,0 +1,103 @@ +{ + "name": 
"check_numeric_min_max_param", + "displayName": "check_numeric_min_max_param", + "startImpossibleReason": null, + "isAllowToTerminate": false, + "hostComponentMapRules": [], + "disclaimer": "", + "configuration": { + "configSchema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Configuration", + "description": "", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "nullValue": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "type": "object", + "properties": { + "group_name": { + "title": "group_name", + "type": "object", + "description": "", + "default": {}, + "readOnly": false, + "adcmMeta": { + "isAdvanced": true, + "isInvisible": false, + "activation": { + "isAllowChange": true + }, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "additionalProperties": false, + "properties": { + "boolean_param": { + "title": "boolean_param", + "type": "boolean", + "description": "", + "default": true, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + } + }, + "some_numeric_param": { + "title": "some_numeric_param", + "type": "integer", + "description": "", + "default": 5, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "minimum": 0, + "maximum": 65535 + } + }, + "required": [ + "boolean_param", + "some_numeric_param" + ] + } + }, + "additionalProperties": false, + "required": [ + "group_name" + ] + }, + "config": { + "group_name": { + "boolean_param": true, + "some_numeric_param": 5 + } + }, + "adcmMeta": { + "/group_name": { + "isActive": true + } + } + } +} \ No newline at end of file 
diff --git a/python/api_v2/tests/test_actions.py b/python/api_v2/tests/test_actions.py index 23ef024a18..a58efa4654 100644 --- a/python/api_v2/tests/test_actions.py +++ b/python/api_v2/tests/test_actions.py @@ -490,6 +490,35 @@ def test_retrieve_jinja_config(self): ) self.assertDictEqual(configuration["adcmMeta"], {"/activatable_group": {"isActive": True}}) + def test_adcm_6013_jinja_config_with_min_max(self): + action = Action.objects.get(name="check_numeric_min_max_param", prototype=self.cluster.prototype) + + response = self.client.v2[self.cluster, "actions", action].get() + + self.assertEqual(response.status_code, HTTP_200_OK) + expected_response = json.loads( + ( + self.test_files_dir / "responses" / "config_schemas" / "for_action_with_numeric_min_max_param.json" + ).read_text(encoding="utf-8") + ) + expected_response["id"] = action.id + self.assertDictEqual(response.json(), expected_response) + + self.cluster.set_state(state="ready_for_numeric_min_max") + response = self.client.v2[self.cluster, "actions", action].get() + + self.assertEqual(response.status_code, HTTP_200_OK) + expected_response = json.loads( + ( + self.test_files_dir + / "responses" + / "config_schemas" + / "for_action_with_numeric_min_max_param_target_state.json" + ).read_text(encoding="utf-8") + ) + expected_response["id"] = action.id + self.assertDictEqual(response.json(), expected_response) + def test_adcm_4703_action_retrieve_returns_500(self) -> None: for object_ in (self.cluster, self.service_1, self.component_1): with self.subTest(object_.__class__.__name__): diff --git a/python/cm/services/config/jinja.py b/python/cm/services/config/jinja.py index 70bcc3bb6d..f5fe8b15d4 100644 --- a/python/cm/services/config/jinja.py +++ b/python/cm/services/config/jinja.py @@ -31,8 +31,6 @@ from cm.services.job.jinja_scripts import get_action_info from cm.services.template import TemplateBuilder -_TEMPLATE_CONFIG_DELETE_FIELDS = {"yspec", "option", "activatable", "active", "read_only", "writable", 
"subs", "source"} - def get_jinja_config( action: Action, cluster_relative_object: Cluster | ClusterObject | ServiceComponent | Host @@ -52,126 +50,136 @@ def get_jinja_config( configs = [] attr = {} - for config in template_builder.data: - for normalized_config in _normalize_config( - config=config, dir_with_config=jinja_conf_file.parent.relative_to(resolver.bundle_root), resolver=resolver + for field in template_builder.data: + for normalized_field in _normalize_field( + field=field, dir_with_config=jinja_conf_file.parent.relative_to(resolver.bundle_root), resolver=resolver ): - configs.append(PrototypeConfig(prototype=action.prototype, action=action, **normalized_config)) + configs.append(PrototypeConfig(prototype=action.prototype, action=action, **normalized_field)) if ( - normalized_config["type"] == "group" - and "activatable" in normalized_config["limits"] - and "active" in normalized_config["limits"] - and normalized_config.get("name") + normalized_field["type"] == "group" + and "activatable" in normalized_field["limits"] + and "active" in normalized_field["limits"] + and normalized_field.get("name") ): - attr[normalized_config["name"]] = normalized_config["limits"] + attr[normalized_field["name"]] = normalized_field["limits"] return configs, attr -def _normalize_config( - config: dict, dir_with_config: Path, resolver: BundlePathResolver, name: str = "", subname: str = "" +def _normalize_field( + field: dict, dir_with_config: Path, resolver: BundlePathResolver, name: str = "", subname: str = "" ) -> list[dict]: """`dir_with_config` should be relative to bundle root""" - config_list = [config] + normalized_field = {} + normalized_fields = [normalized_field] + + name = name or field["name"] + normalized_field["name"] = name - name = name or config["name"] - config["name"] = name if subname: - config["subname"] = subname + normalized_field["subname"] = subname + else: + normalized_field["subname"] = "" - if config.get("display_name") is None: - 
config["display_name"] = subname or name + if field.get("display_name") is None: + normalized_field["display_name"] = subname or name + else: + normalized_field["display_name"] = field["display_name"] - config["limits"] = _get_limits(config=config, dir_with_config=dir_with_config, resolver=resolver) + normalized_field["limits"] = _get_limits(field=field, dir_with_config=dir_with_config, resolver=resolver) - if config["type"] in settings.STACK_FILE_FIELD_TYPES and config.get("default"): - config["default"] = detect_relative_path_to_bundle_root( - source_file_dir=dir_with_config, raw_path=config["default"] + if field["type"] in settings.STACK_FILE_FIELD_TYPES and field.get("default"): + normalized_field["default"] = str( + detect_relative_path_to_bundle_root(source_file_dir=dir_with_config, raw_path=field["default"]) ) - - if "subs" in config: - for subconf in config["subs"]: - config_list.extend( - _normalize_config( - config=subconf, + else: + normalized_field["default"] = field.get("default", "") + + normalized_field["type"] = field["type"] + normalized_field["description"] = field.get("description", "") + normalized_field["group_customization"] = field.get("group_customization", None) + normalized_field["required"] = field.get("required", True) + normalized_field["ui_options"] = field.get("ui_options", {}) + + if "subs" in field: + for sub in field["subs"]: + normalized_fields.extend( + _normalize_field( + field=sub, dir_with_config=dir_with_config, resolver=resolver, name=name, - subname=subconf["name"], + subname=sub["name"], ), ) - for field in _TEMPLATE_CONFIG_DELETE_FIELDS: - if field in config: - del config[field] - - return config_list + return normalized_fields -def _get_limits(config: dict, dir_with_config: Path, resolver: BundlePathResolver) -> dict: +def _get_limits(field: dict, dir_with_config: Path, resolver: BundlePathResolver) -> dict: limits = {} - if "pattern" in config: - if config["type"] not in ("string", "text", "password", "secrettext"): - 
message = f"Incorrectly rendered `config_jinja` file. `pattern` is not allowed in {config['type']}" + if "pattern" in field: + if field["type"] not in ("string", "text", "password", "secrettext"): + message = f"Incorrectly rendered `config_jinja` file. `pattern` is not allowed in {field['type']}" raise RuntimeError(message) - pattern = Pattern(regex_pattern=config.pop("pattern")) + pattern = Pattern(regex_pattern=field.pop("pattern")) if not pattern.is_valid: - display_name = config.get("display_name", config["name"]) + display_name = field.get("display_name", field["name"]) message = f"The pattern attribute value of {display_name} config parameter is not valid regular expression" raise RuntimeError(message) - default = config.get("default") + default = field.get("default") if default is not None and not pattern.matches(str(default)): - display_name = config.get("display_name", config["name"]) + display_name = field.get("display_name", field["name"]) message = f"Default attribute value of {display_name} config parameter does not match pattern" raise RuntimeError(message) limits["pattern"] = pattern.raw - if "yspec" in config and config["type"] in settings.STACK_COMPLEX_FIELD_TYPES: - spec_path = detect_relative_path_to_bundle_root(source_file_dir=dir_with_config, raw_path=config["yspec"]) + if "yspec" in field and field["type"] in settings.STACK_COMPLEX_FIELD_TYPES: + spec_path = detect_relative_path_to_bundle_root(source_file_dir=dir_with_config, raw_path=field["yspec"]) limits["yspec"] = safe_load(stream=resolver.resolve(spec_path).read_text(encoding="utf-8")) - if "option" in config and config["type"] == "option": - limits["option"] = config["option"] + if "option" in field and field["type"] == "option": + limits["option"] = field["option"] - if "source" in config and config["type"] == "variant": - variant_type = config["source"]["type"] + if "source" in field and field["type"] == "variant": + variant_type = field["source"]["type"] source = {"type": 
variant_type, "args": None} - source["strict"] = config["source"].get("strict", True) + source["strict"] = field["source"].get("strict", True) if variant_type == "inline": - source["value"] = config["source"]["value"] + source["value"] = field["source"]["value"] elif variant_type in ("config", "builtin"): - source["name"] = config["source"]["name"] + source["name"] = field["source"]["name"] - if variant_type == "builtin" and "args" in config["source"]: - source["args"] = config["source"]["args"] + if variant_type == "builtin" and "args" in field["source"]: + source["args"] = field["source"]["args"] limits["source"] = source - if "activatable" in config and config["type"] == "group": + if "activatable" in field and field["type"] == "group": limits.update( - activatable=config["activatable"], + activatable=field["activatable"], active=False, ) - if "active" in config: - limits.update(active=config["active"]) + if "active" in field: + limits.update(active=field["active"]) - if config["type"] in settings.STACK_NUMERIC_FIELD_TYPES: - if "min" in config: - limits["min"] = config["min"] + if field["type"] in settings.STACK_NUMERIC_FIELD_TYPES: + if "min" in field: + limits["min"] = field["min"] - if "max" in config: - limits["max"] = config["max"] + if "max" in field: + limits["max"] = field["max"] for label in ("read_only", "writable"): - if label in config: - limits[label] = config[label] + if label in field: + limits[label] = field[label] return limits From 6f1d7b133cbb57880758951e055073bf62562522 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Thu, 17 Oct 2024 12:39:41 +0300 Subject: [PATCH 96/98] ADCM-6044 Fix working with file/secretfile in action config --- python/cm/adcm_config/config.py | 3 +- python/cm/services/job/action.py | 54 +++++++++++++++++--------------- 2 files changed, 31 insertions(+), 26 deletions(-) diff --git a/python/cm/adcm_config/config.py b/python/cm/adcm_config/config.py index 73d4813941..4a5c1a1894 100644 --- 
a/python/cm/adcm_config/config.py +++ b/python/cm/adcm_config/config.py @@ -48,6 +48,7 @@ Prototype, PrototypeConfig, ServiceComponent, + TaskLog, ) from cm.services.bundle import ADCMBundlePathResolver, BundlePathResolver, PathResolver from cm.services.config.jinja import get_jinja_config @@ -739,7 +740,7 @@ def _process_secretmap(conf: dict, key: str, subkey: str) -> None: conf[key][secretmap_key] = ansible_encrypt_and_format(msg=secretmap_value) -def process_config_spec(obj: ADCMEntity, spec: dict, new_config: dict) -> dict: +def process_config_spec(obj: ADCMEntity | TaskLog, spec: dict, new_config: dict) -> dict: for cfg_key, cfg_value in new_config.items(): spec_type = spec[cfg_key].get("type") diff --git a/python/cm/services/job/action.py b/python/cm/services/job/action.py index c2680abfd5..3ba9098dda 100644 --- a/python/cm/services/job/action.py +++ b/python/cm/services/job/action.py @@ -25,7 +25,7 @@ from rest_framework.status import HTTP_409_CONFLICT from cm.adcm_config.checks import check_attr -from cm.adcm_config.config import check_config_spec, get_prototype_config, process_config_spec, process_file_type +from cm.adcm_config.config import check_config_spec, get_prototype_config, process_config_spec from cm.converters import orm_object_to_action_target_type, orm_object_to_core_type from cm.errors import AdcmEx from cm.models import ( @@ -168,26 +168,40 @@ def prepare_task_for_action( job_repo = JobRepoImpl action_repo = ActionRepoImpl owner = CoreObjectDescriptor(id=orm_owner.id, type=orm_object_to_core_type(orm_owner)) + orm_action = Action.objects.select_related("prototype").get(id=action) - spec, flat_spec = _process_run_config( - action=Action.objects.select_related("prototype").get(id=action), - owner=orm_owner, - conf=payload.conf, - attr=payload.attr, - ) + spec, flat_spec, _, _ = get_prototype_config(prototype=orm_action.prototype, action=orm_action, obj=orm_owner) + + if not spec: + if payload.conf: + raise AdcmEx(code="CONFIG_VALUE_ERROR", 
msg="Absent config in action prototype") + + elif not payload.conf: + raise AdcmEx("TASK_ERROR", "action config is required") action_info = action_repo.get_action(id=action) task = job_repo.create_task(target=target, owner=owner, action=action_info, payload=payload) if payload.conf: orm_task = TaskLog.objects.get(id=task.id) + + _process_run_config( + action=orm_action, + owner=orm_owner, + task=orm_task, + conf=payload.conf, + attr=payload.attr, + spec=spec, + flat_spec=flat_spec, + ) + orm_task.config = update_configuration_for_inventory_inplace( configuration=payload.conf, attributes=payload.attr, specification=convert_to_flat_spec_from_proto_flat_spec(prototypes_flat_spec=flat_spec), config_owner=GeneralEntityDescriptor(id=task.id, type="task"), ) - process_file_type(obj=orm_task, spec=spec, conf=payload.conf) + orm_task.save(update_fields=["config"]) # reread to update config # ! this should be reworked when "layering" will be performed @@ -295,30 +309,20 @@ def _check_action_is_available_for_object(owner: ObjectWithAction, action: Actio raise AdcmEx(code="TASK_ERROR", msg="action is disabled") -def _process_run_config(action: Action, owner: ObjectWithAction, conf: dict, attr: dict) -> tuple[dict, dict]: - proto = action.prototype - spec, flat_spec, _, _ = get_prototype_config(prototype=proto, action=action, obj=owner) - if not spec: - if conf: - raise AdcmEx(code="CONFIG_VALUE_ERROR", msg="Absent config in action prototype") - - return {}, {} - - if not conf: - raise AdcmEx("TASK_ERROR", "action config is required") - - check_attr(proto, action, attr, flat_spec) +def _process_run_config( + action: Action, owner: ObjectWithAction, task: TaskLog, conf: dict, attr: dict, spec: dict, flat_spec: dict +) -> None: + check_attr(action.prototype, action, attr, flat_spec) object_config = {} + if owner.config is not None: object_config = ConfigLog.objects.get(id=owner.config.current).config process_variant(obj=owner, spec=spec, conf=object_config) - 
check_config_spec(proto=proto, obj=action, spec=spec, flat_spec=flat_spec, conf=conf, attr=attr) - - process_config_spec(obj=owner, spec=spec, new_config=conf) + check_config_spec(proto=action.prototype, obj=action, spec=spec, flat_spec=flat_spec, conf=conf, attr=attr) - return spec, flat_spec + process_config_spec(obj=task, spec=spec, new_config=conf) def _check_hostcomponent_and_get_delta( From 434270326c0535446f3939848c2c66065b5fd456 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Thu, 17 Oct 2024 20:51:29 +0300 Subject: [PATCH 97/98] Bump version 2.4.0 --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 48b0502390..9a89130d19 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ APP_IMAGE ?= hub.adsw.io/adcm/adcm APP_TAG ?= $(subst /,_,$(BRANCH_NAME)) SELENOID_HOST ?= 10.92.2.65 SELENOID_PORT ?= 4444 -ADCM_VERSION = "2.4.0-dev" +ADCM_VERSION = "2.4.0" PY_FILES = python dev/linters conf/adcm/python_scripts .PHONY: build unittests_sqlite unittests_postgresql pretty lint version From a75f284aa1d70ac135dadc81a3326a712c345fb8 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Mon, 21 Oct 2024 12:32:26 +0300 Subject: [PATCH 98/98] ADCM-6050 Fix error message for upload bundle --- python/api_v2/tests/test_bundle.py | 4 ++-- python/cm/bundle.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/python/api_v2/tests/test_bundle.py b/python/api_v2/tests/test_bundle.py index 1138063d39..5f45308e36 100644 --- a/python/api_v2/tests/test_bundle.py +++ b/python/api_v2/tests/test_bundle.py @@ -64,8 +64,8 @@ def test_upload_duplicate_fail(self): self.assertDictEqual( response.json(), { - "code": "BUNDLE_CONFLICT", - "desc": "Bundle with the same content is already " + "code": "BUNDLE_ERROR", + "desc": "Bundle already exists: Bundle with the same content is already " f"uploaded {settings.DOWNLOAD_DIR / self.new_bundle_file}", "level": "error", }, diff --git a/python/cm/bundle.py 
b/python/cm/bundle.py index 0382b38625..18f68c737f 100644 --- a/python/cm/bundle.py +++ b/python/cm/bundle.py @@ -263,7 +263,8 @@ def upload_file(file) -> Path: if duplicate_path := _get_file_hashes(path=settings.DOWNLOAD_DIR).get(hash_): tmp_path.unlink() raise AdcmEx( - code="BUNDLE_CONFLICT", msg=f"Bundle with the same content is already uploaded {duplicate_path}" + code="BUNDLE_ERROR", + msg=f"Bundle already exists: Bundle with the same content is already uploaded {duplicate_path}", ) # move to downloads