From 70f49e0dfde771f78dff5e7a23e98f0d591eb411 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Tue, 9 Apr 2024 02:30:20 +0400 Subject: [PATCH 01/14] Reimplement the vendors mechanism on top of 0.19.0 --- .gitignore | 1 + .../actors/checkenabledvendorrepos/actor.py | 53 +++++++ .../common/actors/peseventsscanner/actor.py | 4 +- .../libraries/pes_events_scanner.py | 18 ++- .../actors/redhatsignedrpmscanner/actor.py | 89 +++++++----- .../actors/scanvendorrepofiles/actor.py | 26 ++++ .../libraries/scanvendorrepofiles.py | 72 ++++++++++ .../tests/test_scanvendorrepofiles.py | 131 ++++++++++++++++++ .../common/actors/setuptargetrepos/actor.py | 6 +- .../libraries/setuptargetrepos.py | 71 +++++++++- .../vendorreposignaturescanner/actor.py | 72 ++++++++++ .../actors/vendorrepositoriesmapping/actor.py | 19 +++ .../libraries/vendorrepositoriesmapping.py | 92 ++++++++++++ .../common/models/activevendorlist.py | 7 + .../common/models/repositoriesmap.py | 1 + .../common/models/targetrepositories.py | 6 + .../common/models/vendorsignatures.py | 8 ++ .../common/models/vendorsourcerepos.py | 12 ++ .../common/topics/vendortopic.py | 5 + 19 files changed, 650 insertions(+), 43 deletions(-) create mode 100644 repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py create mode 100644 repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py create mode 100644 repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py create mode 100644 repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py create mode 100644 repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py create mode 100644 repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py create mode 100644 repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py create mode 100644 repos/system_upgrade/common/models/activevendorlist.py create mode 100644 
repos/system_upgrade/common/models/vendorsignatures.py create mode 100644 repos/system_upgrade/common/models/vendorsourcerepos.py create mode 100644 repos/system_upgrade/common/topics/vendortopic.py diff --git a/.gitignore b/.gitignore index 0bb92d3d7d..a04c7ded65 100644 --- a/.gitignore +++ b/.gitignore @@ -115,6 +115,7 @@ ENV/ # visual studio code configuration .vscode +*.code-workspace # pycharm .idea diff --git a/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py new file mode 100644 index 0000000000..52f5af9d08 --- /dev/null +++ b/repos/system_upgrade/common/actors/checkenabledvendorrepos/actor.py @@ -0,0 +1,53 @@ +from leapp.actors import Actor +from leapp.libraries.stdlib import api +from leapp.models import ( + RepositoriesFacts, + VendorSourceRepos, + ActiveVendorList, +) +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class CheckEnabledVendorRepos(Actor): + """ + Create a list of vendors whose repositories are present on the system and enabled. + Only those vendors' configurations (new repositories, PES actions, etc.) + will be included in the upgrade process. + """ + + name = "check_enabled_vendor_repos" + consumes = (RepositoriesFacts, VendorSourceRepos) + produces = (ActiveVendorList) + tags = (IPUWorkflowTag, FactsPhaseTag.Before) + + def process(self): + vendor_mapping_data = {} + active_vendors = set() + + # Make a dict for easy mapping of repoid -> corresponding vendor name. + for vendor_src_repodata in api.consume(VendorSourceRepos): + for vendor_src_repo in vendor_src_repodata.source_repoids: + vendor_mapping_data[vendor_src_repo] = vendor_src_repodata.vendor + + # Is the repo listed in the vendor map as from_repoid present on the system? 
+ for repos_facts in api.consume(RepositoriesFacts): + for repo_file in repos_facts.repositories: + for repo_data in repo_file.data: + self.log.debug( + "Looking for repository {} in vendor maps".format(repo_data.repoid) + ) + if repo_data.enabled and repo_data.repoid in vendor_mapping_data: + # If the vendor's repository is present in the system and enabled, count the vendor as active. + new_vendor = vendor_mapping_data[repo_data.repoid] + self.log.debug( + "Repository {} found and enabled, enabling vendor {}".format( + repo_data.repoid, new_vendor + ) + ) + active_vendors.add(new_vendor) + + if active_vendors: + self.log.debug("Active vendor list: {}".format(active_vendors)) + api.produce(ActiveVendorList(data=list(active_vendors))) + else: + self.log.info("No active vendors found, vendor list not generated") diff --git a/repos/system_upgrade/common/actors/peseventsscanner/actor.py b/repos/system_upgrade/common/actors/peseventsscanner/actor.py index c00c1e0fcb..7256f38063 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/actor.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/actor.py @@ -10,7 +10,8 @@ RepositoriesMapping, RepositoriesSetupTasks, RHUIInfo, - RpmTransactionTasks + RpmTransactionTasks, + ActiveVendorList, ) from leapp.reporting import Report from leapp.tags import FactsPhaseTag, IPUWorkflowTag @@ -33,6 +34,7 @@ class PesEventsScanner(Actor): RepositoriesMapping, RHUIInfo, RpmTransactionTasks, + ActiveVendorList, ) produces = (ConsumedDataAsset, PESRpmTransactionTasks, RepositoriesSetupTasks, Report) tags = (IPUWorkflowTag, FactsPhaseTag) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py index 01457f2a43..c737f88060 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ 
b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -1,5 +1,6 @@ from collections import defaultdict, namedtuple from functools import partial +import os from leapp import reporting from leapp.exceptions import StopActorExecutionError @@ -19,7 +20,8 @@ RepositoriesMapping, RepositoriesSetupTasks, RHUIInfo, - RpmTransactionTasks + RpmTransactionTasks, + ActiveVendorList, ) SKIPPED_PKGS_MSG = ( @@ -30,6 +32,7 @@ 'for details.\nThe list of these packages:' ) +VENDORS_DIR = "/etc/leapp/files/vendors.d" TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep')) @@ -485,6 +488,19 @@ def process(): if not events: return + active_vendors = [] + for vendor_list in api.consume(ActiveVendorList): + active_vendors.extend(vendor_list.data) + + pes_json_suffix = "_pes.json" + if os.path.isdir(VENDORS_DIR): + vendor_pesfiles = list(filter(lambda vfile: pes_json_suffix in vfile, os.listdir(VENDORS_DIR))) + + for pesfile in vendor_pesfiles: + if pesfile[:-len(pes_json_suffix)] in active_vendors: + vendor_events = get_pes_events(VENDORS_DIR, pesfile) + events.extend(vendor_events) + releases = get_relevant_releases(events) source_pkgs = get_installed_pkgs() source_pkgs = apply_transaction_configuration(source_pkgs) diff --git a/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py b/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py index 8416fd394a..dbb3e66668 100644 --- a/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py +++ b/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py @@ -1,44 +1,65 @@ from leapp.actors import Actor from leapp.libraries.common import rhui -from leapp.models import InstalledRedHatSignedRPM, InstalledRPM, InstalledUnsignedRPM +from leapp.models import InstalledRedHatSignedRPM, InstalledRPM, InstalledUnsignedRPM, VendorSignatures from leapp.tags import FactsPhaseTag, IPUWorkflowTag +VENDOR_SIGS = { + 'rhel': 
['199e2f91fd431d51', + '5326810137017186', + '938a80caf21541eb', + 'fd372689897da07a', + '45689c882fa658e0'], + 'centos': ['24c6a8a7f4a80eb5', + '05b555b38483c65d', + '4eb84e71f2ee9d55', + 'a963bbdbf533f4fa', + '6c7cb6ef305d49d6'], + 'almalinux': ['51d6647ec21ad6ea', + 'd36cb86cb86b3716'], + 'rocky': ['15af5dac6d745a60', + '702d426d350d275d'], + 'ol': ['72f97b74ec551f03', + '82562ea9ad986da3', + 'bc4d06a08d8b756f'], + 'eurolinux': ['75c333f418cd4a9e', + 'b413acad6275f250', + 'f7ad3e5a1c9fd080'], + 'scientific': ['b0b4183f192a7d7d'] +} + +VENDOR_PACKAGERS = { + "rhel": "Red Hat, Inc.", + "centos": "CentOS", + "almalinux": "AlmaLinux Packaging Team", + "rocky": "infrastructure@rockylinux.org", + "eurolinux": "EuroLinux", + "scientific": "Scientific Linux", +} + + class RedHatSignedRpmScanner(Actor): - """Provide data about installed RPM Packages signed by Red Hat. + """Provide data about installed RPM Packages signed by vendors. + + The "Red Hat" in the name of the actor is a historical artifact - the actor + is used for all vendors present in the config. After filtering the list of installed RPM packages by signature, a message with relevant data will be produced. 
""" name = 'red_hat_signed_rpm_scanner' - consumes = (InstalledRPM,) + consumes = (InstalledRPM, VendorSignatures) produces = (InstalledRedHatSignedRPM, InstalledUnsignedRPM,) tags = (IPUWorkflowTag, FactsPhaseTag) def process(self): - RH_SIGS = ['199e2f91fd431d51', # rhel - '5326810137017186', - '938a80caf21541eb', - 'fd372689897da07a', - '45689c882fa658e0', - '24c6a8a7f4a80eb5', # centos - '05b555b38483c65d', - '4eb84e71f2ee9d55', - 'a963bbdbf533f4fa', - '6c7cb6ef305d49d6', - '51d6647ec21ad6ea', # almalinux - 'd36cb86cb86b3716', - '2ae81e8aced7258b', - '15af5dac6d745a60', # rockylinux - '702d426d350d275d', - '72f97b74ec551f03', # ol - '82562ea9ad986da3', - 'bc4d06a08d8b756f', - '75c333f418cd4a9e', # eurolinux - 'b413acad6275f250', - 'f7ad3e5a1c9fd080', - 'b0b4183f192a7d7d'] # scientific + # Packages from multiple vendors can be installed on the system. + # Picking the vendor based on the OS release is not enough. + vendor_keys = sum(VENDOR_SIGS.values(), []) + + for siglist in self.consume(VendorSignatures): + vendor_keys.extend(siglist.sigs) signed_pkgs = InstalledRedHatSignedRPM() unsigned_pkgs = InstalledUnsignedRPM() @@ -52,8 +73,8 @@ def process(self): if env.name == 'LEAPP_DEVEL_RPMS_ALL_SIGNED' and env.value == '1' ] - def has_rhsig(pkg): - return any(key in pkg.pgpsig for key in RH_SIGS) + def has_vendorsig(pkg): + return any(key in pkg.pgpsig for key in vendor_keys) def is_gpg_pubkey(pkg): """Check if gpg-pubkey pkg exists or LEAPP_DEVEL_RPMS_ALL_SIGNED=1 @@ -61,15 +82,9 @@ def is_gpg_pubkey(pkg): gpg-pubkey is not signed as it would require another package to verify its signature """ - return ( # pylint: disable-msg=consider-using-ternary - pkg.name == 'gpg-pubkey' - and (pkg.packager.startswith('Red Hat, Inc.') - or pkg.packager.startswith('CentOS') - or pkg.packager.startswith('AlmaLinux') - or pkg.packager.startswith('infrastructure@rockylinux.org') - or pkg.packager.startswith('EuroLinux') - or pkg.packager.startswith('Scientific Linux')) - or 
all_signed + return ( # pylint: disable-msg=consider-using-ternary + pkg.name == "gpg-pubkey" + or all_signed ) def has_katello_prefix(pkg): @@ -101,7 +116,7 @@ def has_katello_prefix(pkg): for pkg in rpm_pkgs.items: if any( [ - has_rhsig(pkg), + has_vendorsig(pkg), is_gpg_pubkey(pkg), has_katello_prefix(pkg), pkg.name in whitelisted_cloud_pkgs, diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py new file mode 100644 index 0000000000..a5e481cb0e --- /dev/null +++ b/repos/system_upgrade/common/actors/scanvendorrepofiles/actor.py @@ -0,0 +1,26 @@ +from leapp.actors import Actor +from leapp.libraries.actor import scanvendorrepofiles +from leapp.models import ( + CustomTargetRepositoryFile, + ActiveVendorList, + VendorCustomTargetRepositoryList, +) +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class ScanVendorRepofiles(Actor): + """ + Load and produce custom repository data from vendor-provided files. + Only those vendors whose source system repoids were found on the system will be included. 
+ """ + + name = "scan_vendor_repofiles" + consumes = ActiveVendorList + produces = ( + CustomTargetRepositoryFile, + VendorCustomTargetRepositoryList, + ) + tags = (FactsPhaseTag, IPUWorkflowTag) + + def process(self): + scanvendorrepofiles.process() diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py new file mode 100644 index 0000000000..843921018e --- /dev/null +++ b/repos/system_upgrade/common/actors/scanvendorrepofiles/libraries/scanvendorrepofiles.py @@ -0,0 +1,72 @@ +import os + +from leapp.libraries.common import repofileutils +from leapp.libraries.stdlib import api +from leapp.models import ( + CustomTargetRepository, + CustomTargetRepositoryFile, + ActiveVendorList, + VendorCustomTargetRepositoryList, +) + + +VENDORS_DIR = "/etc/leapp/files/vendors.d/" +REPOFILE_SUFFIX = ".repo" + + +def process(): + """ + Produce CustomTargetRepository msgs for the vendor repo files inside the + . + + The CustomTargetRepository messages are produced only if a "from" vendor repository + listed indide its map matched one of the repositories active on the system. + """ + if not os.path.isdir(VENDORS_DIR): + api.current_logger().debug( + "The {} directory doesn't exist. Nothing to do.".format(VENDORS_DIR) + ) + return + + for repofile_name in os.listdir(VENDORS_DIR): + if not repofile_name.endswith(REPOFILE_SUFFIX): + continue + # Cut the .repo part to get only the name. 
+ vendor_name = repofile_name[:-5] + + active_vendors = [] + for vendor_list in api.consume(ActiveVendorList): + active_vendors.extend(vendor_list.data) + + api.current_logger().debug("Active vendor list: {}".format(active_vendors)) + + if vendor_name not in active_vendors: + api.current_logger().debug( + "Vendor {} not in active list, skipping".format(vendor_name) + ) + continue + + full_repo_path = os.path.join(VENDORS_DIR, repofile_name) + parsed_repofile = repofileutils.parse_repofile(full_repo_path) + api.current_logger().debug( + "Vendor {} found in active list, processing file {}".format(vendor_name, repofile_name) + ) + + api.produce(CustomTargetRepositoryFile(file=full_repo_path)) + + custom_vendor_repos = [ + CustomTargetRepository( + repoid=repo.repoid, + name=repo.name, + baseurl=repo.baseurl, + enabled=repo.enabled, + ) for repo in parsed_repofile.data + ] + + api.produce( + VendorCustomTargetRepositoryList(vendor=vendor_name, repos=custom_vendor_repos) + ) + + api.current_logger().info( + "The {} directory exists, vendor repositories loaded.".format(VENDORS_DIR) + ) diff --git a/repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py b/repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py new file mode 100644 index 0000000000..cb5c7ab7ed --- /dev/null +++ b/repos/system_upgrade/common/actors/scanvendorrepofiles/tests/test_scanvendorrepofiles.py @@ -0,0 +1,131 @@ +import os + +from leapp.libraries.actor import scancustomrepofile +from leapp.libraries.common import repofileutils +from leapp.libraries.common.testutils import produce_mocked +from leapp.libraries.stdlib import api + +from leapp.models import (CustomTargetRepository, CustomTargetRepositoryFile, + RepositoryData, RepositoryFile) + + +_REPODATA = [ + RepositoryData(repoid="repo1", name="repo1name", baseurl="repo1url", enabled=True), + RepositoryData(repoid="repo2", name="repo2name", baseurl="repo2url", enabled=False), + 
RepositoryData(repoid="repo3", name="repo3name", enabled=True), + RepositoryData(repoid="repo4", name="repo4name", mirrorlist="mirror4list", enabled=True), +] + +_CUSTOM_REPOS = [ + CustomTargetRepository(repoid="repo1", name="repo1name", baseurl="repo1url", enabled=True), + CustomTargetRepository(repoid="repo2", name="repo2name", baseurl="repo2url", enabled=False), + CustomTargetRepository(repoid="repo3", name="repo3name", baseurl=None, enabled=True), + CustomTargetRepository(repoid="repo4", name="repo4name", baseurl=None, enabled=True), +] + +_CUSTOM_REPO_FILE_MSG = CustomTargetRepositoryFile(file=scancustomrepofile.CUSTOM_REPO_PATH) + + +_TESTING_REPODATA = [ + RepositoryData(repoid="repo1-stable", name="repo1name", baseurl="repo1url", enabled=True), + RepositoryData(repoid="repo2-testing", name="repo2name", baseurl="repo2url", enabled=False), + RepositoryData(repoid="repo3-stable", name="repo3name", enabled=False), + RepositoryData(repoid="repo4-testing", name="repo4name", mirrorlist="mirror4list", enabled=True), +] + +_TESTING_CUSTOM_REPOS_STABLE_TARGET = [ + CustomTargetRepository(repoid="repo1-stable", name="repo1name", baseurl="repo1url", enabled=True), + CustomTargetRepository(repoid="repo2-testing", name="repo2name", baseurl="repo2url", enabled=False), + CustomTargetRepository(repoid="repo3-stable", name="repo3name", baseurl=None, enabled=False), + CustomTargetRepository(repoid="repo4-testing", name="repo4name", baseurl=None, enabled=True), +] + +_TESTING_CUSTOM_REPOS_BETA_TARGET = [ + CustomTargetRepository(repoid="repo1-stable", name="repo1name", baseurl="repo1url", enabled=True), + CustomTargetRepository(repoid="repo2-testing", name="repo2name", baseurl="repo2url", enabled=True), + CustomTargetRepository(repoid="repo3-stable", name="repo3name", baseurl=None, enabled=False), + CustomTargetRepository(repoid="repo4-testing", name="repo4name", baseurl=None, enabled=True), +] + +_PROCESS_STABLE_TARGET = "stable" +_PROCESS_BETA_TARGET = "beta" + + +class 
LoggerMocked(object): + def __init__(self): + self.infomsg = None + self.debugmsg = None + + def info(self, msg): + self.infomsg = msg + + def debug(self, msg): + self.debugmsg = msg + + def __call__(self): + return self + + +def test_no_repofile(monkeypatch): + monkeypatch.setattr(os.path, 'isfile', lambda dummy: False) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(api, 'current_logger', LoggerMocked()) + scancustomrepofile.process() + msg = "The {} file doesn't exist. Nothing to do.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.debugmsg == msg + assert not api.produce.called + + +def test_valid_repofile_exists(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=_REPODATA) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + monkeypatch.setattr(api, 'current_logger', LoggerMocked()) + scancustomrepofile.process() + msg = "The {} file exists, custom repositories loaded.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.infomsg == msg + assert api.produce.called == len(_CUSTOM_REPOS) + 1 + assert _CUSTOM_REPO_FILE_MSG in api.produce.model_instances + for crepo in _CUSTOM_REPOS: + assert crepo in api.produce.model_instances + + +def test_target_stable_repos(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=_TESTING_REPODATA) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + + scancustomrepofile.process(_PROCESS_STABLE_TARGET) + assert api.produce.called == len(_TESTING_CUSTOM_REPOS_STABLE_TARGET) + 1 + for crepo in _TESTING_CUSTOM_REPOS_STABLE_TARGET: + assert crepo in api.produce.model_instances + + +def 
test_target_beta_repos(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=_TESTING_REPODATA) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + + scancustomrepofile.process(_PROCESS_BETA_TARGET) + assert api.produce.called == len(_TESTING_CUSTOM_REPOS_BETA_TARGET) + 1 + for crepo in _TESTING_CUSTOM_REPOS_BETA_TARGET: + assert crepo in api.produce.model_instances + + +def test_empty_repofile_exists(monkeypatch): + def _mocked_parse_repofile(fpath): + return RepositoryFile(file=fpath, data=[]) + monkeypatch.setattr(os.path, 'isfile', lambda dummy: True) + monkeypatch.setattr(api, 'produce', produce_mocked()) + monkeypatch.setattr(repofileutils, 'parse_repofile', _mocked_parse_repofile) + monkeypatch.setattr(api, 'current_logger', LoggerMocked()) + scancustomrepofile.process() + msg = "The {} file exists, but is empty. 
Nothing to do.".format(scancustomrepofile.CUSTOM_REPO_PATH) + assert api.current_logger.infomsg == msg + assert not api.produce.called diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/actor.py b/repos/system_upgrade/common/actors/setuptargetrepos/actor.py index 767fa00c2f..bc1d5bfa13 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/actor.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/actor.py @@ -10,7 +10,8 @@ RHUIInfo, SkippedRepositories, TargetRepositories, - UsedRepositories + UsedRepositories, + VendorCustomTargetRepositoryList ) from leapp.tags import FactsPhaseTag, IPUWorkflowTag @@ -32,7 +33,8 @@ class SetupTargetRepos(Actor): RepositoriesFacts, RepositoriesBlacklisted, RHUIInfo, - UsedRepositories) + UsedRepositories, + VendorCustomTargetRepositoryList) produces = (TargetRepositories, SkippedRepositories) tags = (IPUWorkflowTag, FactsPhaseTag) diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py index 4b8405d010..1029fc0331 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py @@ -13,7 +13,8 @@ RHUIInfo, SkippedRepositories, TargetRepositories, - UsedRepositories + UsedRepositories, + VendorCustomTargetRepositoryList ) @@ -66,6 +67,26 @@ def _get_used_repo_dict(): return used +def _combine_repomap_messages(mapping_list): + """ + Combine multiple repository mapping messages into one. + Needed because we might get more than one message if there are vendors present. + """ + combined_mapping = [] + combined_repositories = [] + # Depending on whether there are any vendors present, we might get more than one message. 
+ for msg in mapping_list: + combined_mapping.extend(msg.mapping) + combined_repositories.extend(msg.repositories) + + combined_repomapping = RepositoriesMapping( + mapping=combined_mapping, + repositories=combined_repositories + ) + + return combined_repomapping + + def _get_mapped_repoids(repomap, src_repoids): mapped_repoids = set() src_maj_ver = get_source_major_version() @@ -75,16 +96,62 @@ def _get_mapped_repoids(repomap, src_repoids): return mapped_repoids +def _get_vendor_custom_repos(enabled_repos, mapping_list): + # Look at what source repos from the vendor mapping were enabled. + # If any of them are in beta, include vendor's custom repos in the list. + # Otherwise skip them. + + result = [] + + # Build a dict of vendor mappings for easy lookup. + map_dict = {mapping.vendor: mapping for mapping in mapping_list if mapping.vendor} + + for vendor_repolist in api.consume(VendorCustomTargetRepositoryList): + vendor_repomap = map_dict[vendor_repolist.vendor] + + # Find the beta channel repositories for the vendor. + beta_repos = [ + x.repoid for x in vendor_repomap.repositories if x.channel == "beta" + ] + api.current_logger().debug( + "Vendor {} beta repos: {}".format(vendor_repolist.vendor, beta_repos) + ) + + # Are any of the beta repos present and enabled on the system? + if any(rep in beta_repos for rep in enabled_repos): + # If so, use all repos including beta in the upgrade. + vendor_repos = vendor_repolist.repos + else: + # Otherwise filter beta repos out. 
+ vendor_repos = [repo for repo in vendor_repolist.repos if repo.repoid not in beta_repos] + + result.extend([CustomTargetRepository( + repoid=repo.repoid, + name=repo.name, + baseurl=repo.baseurl, + enabled=repo.enabled, + ) for repo in vendor_repos]) + + return result + + def process(): # Load relevant data from messages used_repoids_dict = _get_used_repo_dict() enabled_repoids = _get_enabled_repoids() excluded_repoids = _get_blacklisted_repoids() + + # Remember that we can't just grab one message, each vendor can have its own mapping. + repo_mapping_list = list(api.consume(RepositoriesMapping)) + custom_repos = _get_custom_target_repos() repoids_from_installed_packages = _get_repoids_from_installed_packages() + vendor_repos = _get_vendor_custom_repos(enabled_repoids, repo_mapping_list) + custom_repos.extend(vendor_repos) + # Setup repomap handler - repo_mappig_msg = next(api.consume(RepositoriesMapping), RepositoriesMapping()) + repo_mappig_msg = _combine_repomap_messages(repo_mapping_list) rhui_info = next(api.consume(RHUIInfo), RHUIInfo(provider='')) repomap = setuptargetrepos_repomap.RepoMapDataHandler(repo_mappig_msg, cloud_provider=rhui_info.provider) diff --git a/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py b/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py new file mode 100644 index 0000000000..dbf8697437 --- /dev/null +++ b/repos/system_upgrade/common/actors/vendorreposignaturescanner/actor.py @@ -0,0 +1,72 @@ +import os + +from leapp.actors import Actor +from leapp.models import VendorSignatures, ActiveVendorList +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +VENDORS_DIR = "/etc/leapp/files/vendors.d/" +SIGFILE_SUFFIX = ".sigs" + + +class VendorRepoSignatureScanner(Actor): + """ + Produce VendorSignatures messages for the vendor signature files inside the + vendors.d directory. + These messages are used to extend the list of packages Leapp will consider +signed and will attempt to upgrade.
+ + The messages are produced only if a "from" vendor repository + listed inside its map matched one of the repositories active on the system. + """ + + name = 'vendor_repo_signature_scanner' + consumes = (ActiveVendorList) + produces = (VendorSignatures) + tags = (IPUWorkflowTag, FactsPhaseTag.Before) + + def process(self): + if not os.path.isdir(VENDORS_DIR): + self.log.debug( + "The {} directory doesn't exist. Nothing to do.".format(VENDORS_DIR) + ) + return + + active_vendors = [] + for vendor_list in self.consume(ActiveVendorList): + active_vendors.extend(vendor_list.data) + + self.log.debug( + "Active vendor list: {}".format(active_vendors) + ) + + for sigfile_name in os.listdir(VENDORS_DIR): + if not sigfile_name.endswith(SIGFILE_SUFFIX): + continue + # Cut the suffix part to get only the name. + vendor_name = sigfile_name[:-5] + + if vendor_name not in active_vendors: + self.log.debug( + "Vendor {} not in active list, skipping".format(vendor_name) + ) + continue + + self.log.debug( + "Vendor {} found in active list, processing file {}".format(vendor_name, sigfile_name) + ) + + full_sigfile_path = os.path.join(VENDORS_DIR, sigfile_name) + with open(full_sigfile_path) as f: + signatures = [line for line in f.read().splitlines() if line] + + self.produce( + VendorSignatures( + vendor=vendor_name, + sigs=signatures, + ) + ) + + self.log.info( + "The {} directory exists, vendor signatures loaded.".format(VENDORS_DIR) + ) diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py new file mode 100644 index 0000000000..132564769e --- /dev/null +++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/actor.py @@ -0,0 +1,19 @@ +from leapp.actors import Actor +# from leapp.libraries.common.repomaputils import scan_vendor_repomaps, VENDOR_REPOMAP_DIR +from leapp.libraries.actor.vendorrepositoriesmapping import scan_vendor_repomaps +from leapp.models import
VendorSourceRepos, RepositoriesMapping +from leapp.tags import FactsPhaseTag, IPUWorkflowTag + + +class VendorRepositoriesMapping(Actor): + """ + Scan the vendor repository mapping files and provide the data to other actors. + """ + + name = "vendor_repositories_mapping" + consumes = () + produces = (RepositoriesMapping, VendorSourceRepos,) + tags = (IPUWorkflowTag, FactsPhaseTag.Before) + + def process(self): + scan_vendor_repomaps() diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py new file mode 100644 index 0000000000..73dac26a67 --- /dev/null +++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py @@ -0,0 +1,92 @@ +import os +import json + +from leapp.libraries.actor import repositoriesmapping +from leapp.libraries.common import fetch +from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version +from leapp.libraries.stdlib import api +from leapp.models import VendorSourceRepos, RepositoriesMapping +from leapp.models.fields import ModelViolationError +from leapp.exceptions import StopActorExecutionError + + +VENDORS_DIR = "/etc/leapp/files/vendors.d" +"""The folder containing the vendor repository mapping files.""" + + +def inhibit_upgrade(msg): + raise StopActorExecutionError( + msg, + details={'hint': ('Read documentation at the following link for more' + ' information about how to retrieve the valid file:' + ' https://access.redhat.com/articles/3664871')}) + + +def read_repofile(repofile, repodir): + try: + return json.loads(fetch.read_or_fetch(repofile, directory=repodir, allow_download=False)) + except ValueError: + # The data does not contain a valid json + inhibit_upgrade('The repository mapping file is invalid: file does not contain a valid JSON object.') + return None + + +def 
read_repomap_file(repomap_file, read_repofile_func, vendor_name): + json_data = read_repofile_func(repomap_file, VENDORS_DIR) + try: + repomap_data = repositoriesmapping.RepoMapData.load_from_dict(json_data) + + source_major = get_source_major_version() + target_major = get_target_major_version() + + api.produce(VendorSourceRepos( + vendor=vendor_name, + source_repoids=repomap_data.get_version_repoids(source_major) + )) + + mapping = repomap_data.get_mappings(source_major, target_major) + valid_major_versions = [source_major, target_major] + + api.produce(RepositoriesMapping( + mapping=mapping, + repositories=repomap_data.get_repositories(valid_major_versions), + vendor=vendor_name + )) + except ModelViolationError as err: + err_message = ( + 'The repository mapping file is invalid: ' + 'the JSON does not match required schema (wrong field type/value): {}. ' + 'Ensure that the current upgrade path is correct and is present in the mappings: {} -> {}' + .format(err, source_major, target_major) + ) + inhibit_upgrade(err_message) + except KeyError as err: + inhibit_upgrade( + 'The repository mapping file is invalid: the JSON is missing a required field: {}'.format(err)) + except ValueError as err: + # The error should contain enough information, so we do not need to clarify it further + inhibit_upgrade('The repository mapping file is invalid: {}'.format(err)) + + +def scan_vendor_repomaps(read_repofile_func=read_repofile): + """ + Scan the repository mapping file and produce RepositoriesMapping msg. + + See the description of the actor for more details. + """ + + map_json_suffix = "_map.json" + if os.path.isdir(VENDORS_DIR): + vendor_mapfiles = list(filter(lambda vfile: map_json_suffix in vfile, os.listdir(VENDORS_DIR))) + + for mapfile in vendor_mapfiles: + read_repomap_file(mapfile, read_repofile_func, mapfile[:-len(map_json_suffix)]) + else: + api.current_logger().debug( + "The {} directory doesn't exist. 
Nothing to do.".format(VENDORS_DIR) + ) + # vendor_repomap_collection = scan_vendor_repomaps(VENDOR_REPOMAP_DIR) + # if vendor_repomap_collection: + # self.produce(vendor_repomap_collection) + # for repomap in vendor_repomap_collection.maps: + # self.produce(repomap) diff --git a/repos/system_upgrade/common/models/activevendorlist.py b/repos/system_upgrade/common/models/activevendorlist.py new file mode 100644 index 0000000000..de4056fbc5 --- /dev/null +++ b/repos/system_upgrade/common/models/activevendorlist.py @@ -0,0 +1,7 @@ +from leapp.models import Model, fields +from leapp.topics import VendorTopic + + +class ActiveVendorList(Model): + topic = VendorTopic + data = fields.List(fields.String()) diff --git a/repos/system_upgrade/common/models/repositoriesmap.py b/repos/system_upgrade/common/models/repositoriesmap.py index 12639e19b4..da4f7aac0a 100644 --- a/repos/system_upgrade/common/models/repositoriesmap.py +++ b/repos/system_upgrade/common/models/repositoriesmap.py @@ -91,3 +91,4 @@ class RepositoriesMapping(Model): mapping = fields.List(fields.Model(RepoMapEntry), default=[]) repositories = fields.List(fields.Model(PESIDRepositoryEntry), default=[]) + vendor = fields.Nullable(fields.String()) diff --git a/repos/system_upgrade/common/models/targetrepositories.py b/repos/system_upgrade/common/models/targetrepositories.py index 02c6c5e530..f9fd4238f1 100644 --- a/repos/system_upgrade/common/models/targetrepositories.py +++ b/repos/system_upgrade/common/models/targetrepositories.py @@ -21,6 +21,12 @@ class CustomTargetRepository(TargetRepositoryBase): enabled = fields.Boolean(default=True) +class VendorCustomTargetRepositoryList(Model): + topic = TransactionTopic + vendor = fields.String() + repos = fields.List(fields.Model(CustomTargetRepository)) + + class TargetRepositories(Model): """ Repositories supposed to be used during the IPU process diff --git a/repos/system_upgrade/common/models/vendorsignatures.py 
b/repos/system_upgrade/common/models/vendorsignatures.py new file mode 100644 index 0000000000..f456aec5d5 --- /dev/null +++ b/repos/system_upgrade/common/models/vendorsignatures.py @@ -0,0 +1,8 @@ +from leapp.models import Model, fields +from leapp.topics import VendorTopic + + +class VendorSignatures(Model): + topic = VendorTopic + vendor = fields.String() + sigs = fields.List(fields.String()) diff --git a/repos/system_upgrade/common/models/vendorsourcerepos.py b/repos/system_upgrade/common/models/vendorsourcerepos.py new file mode 100644 index 0000000000..b7a219b467 --- /dev/null +++ b/repos/system_upgrade/common/models/vendorsourcerepos.py @@ -0,0 +1,12 @@ +from leapp.models import Model, fields +from leapp.topics import VendorTopic + + +class VendorSourceRepos(Model): + """ + This model contains the data on all source repositories associated with a specific vendor. + Its data is used to determine whether the vendor should be included into the upgrade process. + """ + topic = VendorTopic + vendor = fields.String() + source_repoids = fields.List(fields.String()) diff --git a/repos/system_upgrade/common/topics/vendortopic.py b/repos/system_upgrade/common/topics/vendortopic.py new file mode 100644 index 0000000000..014b7afbcc --- /dev/null +++ b/repos/system_upgrade/common/topics/vendortopic.py @@ -0,0 +1,5 @@ +from leapp.topics import Topic + + +class VendorTopic(Topic): + name = 'vendor_topic' From 323b532dbf6f4f54dd888649bbcbc2a05c57a917 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Mon, 15 Apr 2024 06:35:24 +0400 Subject: [PATCH 02/14] Move the RepoMapData class to a common library --- .../libraries/repositoriesmapping.py | 118 +----------------- .../libraries/vendorrepositoriesmapping.py | 4 +- .../common/libraries/repomaputils.py | 116 +++++++++++++++++ 3 files changed, 120 insertions(+), 118 deletions(-) create mode 100644 repos/system_upgrade/common/libraries/repomaputils.py diff --git 
a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py index 416034ace3..e010c5b9a7 100644 --- a/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py +++ b/repos/system_upgrade/common/actors/repositoriesmapping/libraries/repositoriesmapping.py @@ -1,11 +1,11 @@ import os -from collections import defaultdict from leapp.exceptions import StopActorExecutionError from leapp.libraries.common.config.version import get_source_major_version, get_target_major_version +from leapp.libraries.common.repomaputils import RepoMapData from leapp.libraries.common.fetch import load_data_asset from leapp.libraries.stdlib import api -from leapp.models import PESIDRepositoryEntry, RepoMapEntry, RepositoriesMapping +from leapp.models import RepositoriesMapping from leapp.models.fields import ModelViolationError OLD_REPOMAP_FILE = 'repomap.csv' @@ -15,120 +15,6 @@ """The name of the new repository mapping file.""" -class RepoMapData(object): - VERSION_FORMAT = '1.2.0' - - def __init__(self): - self.repositories = [] - self.mapping = {} - - def add_repository(self, data, pesid): - """ - Add new PESIDRepositoryEntry with given pesid from the provided dictionary. - - :param data: A dict containing the data of the added repository. The dictionary structure corresponds - to the repositories entries in the repository mapping JSON schema. - :type data: Dict[str, str] - :param pesid: PES id of the repository family that the newly added repository belongs to. 
- :type pesid: str - """ - self.repositories.append(PESIDRepositoryEntry( - repoid=data['repoid'], - channel=data['channel'], - rhui=data.get('rhui', ''), - repo_type=data['repo_type'], - arch=data['arch'], - major_version=data['major_version'], - pesid=pesid - )) - - def get_repositories(self, valid_major_versions): - """ - Return the list of PESIDRepositoryEntry object matching the specified major versions. - """ - return [repo for repo in self.repositories if repo.major_version in valid_major_versions] - - def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid): - """ - Add a new mapping entry that is mapping the source pesid to the destination pesid(s), - relevant in an IPU from the supplied source major version to the supplied target - major version. - - :param str source_major_version: Specifies the major version of the source system - for which the added mapping applies. - :param str target_major_version: Specifies the major version of the target system - for which the added mapping applies. - :param str source_pesid: PESID of the source repository. - :param Union[str|List[str]] target_pesid: A single target PESID or a list of target - PESIDs of the added mapping. - """ - # NOTE: it could be more simple, but I prefer to be sure the input data - # contains just one map per source PESID. - key = '{}:{}'.format(source_major_version, target_major_version) - rmap = self.mapping.get(key, defaultdict(set)) - self.mapping[key] = rmap - if isinstance(target_pesid, list): - rmap[source_pesid].update(target_pesid) - else: - rmap[source_pesid].add(target_pesid) - - def get_mappings(self, src_major_version, dst_major_version): - """ - Return the list of RepoMapEntry objects for the specified upgrade path. - - IOW, the whole mapping for specified IPU. 
- """ - key = '{}:{}'.format(src_major_version, dst_major_version) - rmap = self.mapping.get(key, None) - if not rmap: - return None - map_list = [] - for src_pesid in sorted(rmap.keys()): - map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid]))) - return map_list - - @staticmethod - def load_from_dict(data): - if data['version_format'] != RepoMapData.VERSION_FORMAT: - raise ValueError( - 'The obtained repomap data has unsupported version of format.' - ' Get {} required {}' - .format(data['version_format'], RepoMapData.VERSION_FORMAT) - ) - - repomap = RepoMapData() - - # Load reposiories - existing_pesids = set() - for repo_family in data['repositories']: - existing_pesids.add(repo_family['pesid']) - for repo in repo_family['entries']: - repomap.add_repository(repo, repo_family['pesid']) - - # Load mappings - for mapping in data['mapping']: - for entry in mapping['entries']: - if not isinstance(entry['target'], list): - raise ValueError( - 'The target field of a mapping entry is not a list: {}' - .format(entry) - ) - - for pesid in [entry['source']] + entry['target']: - if pesid not in existing_pesids: - raise ValueError( - 'The {} pesid is not related to any repository.' 
- .format(pesid) - ) - repomap.add_mapping( - source_major_version=mapping['source_major_version'], - target_major_version=mapping['target_major_version'], - source_pesid=entry['source'], - target_pesid=entry['target'], - ) - return repomap - - def _inhibit_upgrade(msg): rpmname = 'leapp-upgrade-el{}toel{}'.format(get_source_major_version(), get_target_major_version()) hint = ( diff --git a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py index 73dac26a67..6a41d4e505 100644 --- a/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py +++ b/repos/system_upgrade/common/actors/vendorrepositoriesmapping/libraries/vendorrepositoriesmapping.py @@ -1,9 +1,9 @@ import os import json -from leapp.libraries.actor import repositoriesmapping from leapp.libraries.common import fetch from leapp.libraries.common.config.version import get_target_major_version, get_source_major_version +from leapp.libraries.common.repomaputils import RepoMapData from leapp.libraries.stdlib import api from leapp.models import VendorSourceRepos, RepositoriesMapping from leapp.models.fields import ModelViolationError @@ -34,7 +34,7 @@ def read_repofile(repofile, repodir): def read_repomap_file(repomap_file, read_repofile_func, vendor_name): json_data = read_repofile_func(repomap_file, VENDORS_DIR) try: - repomap_data = repositoriesmapping.RepoMapData.load_from_dict(json_data) + repomap_data = RepoMapData.load_from_dict(json_data) source_major = get_source_major_version() target_major = get_target_major_version() diff --git a/repos/system_upgrade/common/libraries/repomaputils.py b/repos/system_upgrade/common/libraries/repomaputils.py new file mode 100644 index 0000000000..01ee22c843 --- /dev/null +++ b/repos/system_upgrade/common/libraries/repomaputils.py @@ -0,0 +1,116 @@ +from collections import defaultdict 
+from leapp.models import PESIDRepositoryEntry, RepoMapEntry + + +class RepoMapData(object): + VERSION_FORMAT = '1.2.0' + + def __init__(self): + self.repositories = [] + self.mapping = {} + + def add_repository(self, data, pesid): + """ + Add new PESIDRepositoryEntry with given pesid from the provided dictionary. + + :param data: A dict containing the data of the added repository. The dictionary structure corresponds + to the repositories entries in the repository mapping JSON schema. + :type data: Dict[str, str] + :param pesid: PES id of the repository family that the newly added repository belongs to. + :type pesid: str + """ + self.repositories.append(PESIDRepositoryEntry( + repoid=data['repoid'], + channel=data['channel'], + rhui=data.get('rhui', ''), + repo_type=data['repo_type'], + arch=data['arch'], + major_version=data['major_version'], + pesid=pesid + )) + + def get_repositories(self, valid_major_versions): + """ + Return the list of PESIDRepositoryEntry object matching the specified major versions. + """ + return [repo for repo in self.repositories if repo.major_version in valid_major_versions] + + def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid): + """ + Add a new mapping entry that is mapping the source pesid to the destination pesid(s), + relevant in an IPU from the supplied source major version to the supplied target + major version. + + :param str source_major_version: Specifies the major version of the source system + for which the added mapping applies. + :param str target_major_version: Specifies the major version of the target system + for which the added mapping applies. + :param str source_pesid: PESID of the source repository. + :param Union[str|List[str]] target_pesid: A single target PESID or a list of target + PESIDs of the added mapping. + """ + # NOTE: it could be more simple, but I prefer to be sure the input data + # contains just one map per source PESID. 
+ key = '{}:{}'.format(source_major_version, target_major_version) + rmap = self.mapping.get(key, defaultdict(set)) + self.mapping[key] = rmap + if isinstance(target_pesid, list): + rmap[source_pesid].update(target_pesid) + else: + rmap[source_pesid].add(target_pesid) + + def get_mappings(self, src_major_version, dst_major_version): + """ + Return the list of RepoMapEntry objects for the specified upgrade path. + + IOW, the whole mapping for specified IPU. + """ + key = '{}:{}'.format(src_major_version, dst_major_version) + rmap = self.mapping.get(key, None) + if not rmap: + return None + map_list = [] + for src_pesid in sorted(rmap.keys()): + map_list.append(RepoMapEntry(source=src_pesid, target=sorted(rmap[src_pesid]))) + return map_list + + @staticmethod + def load_from_dict(data): + if data['version_format'] != RepoMapData.VERSION_FORMAT: + raise ValueError( + 'The obtained repomap data has unsupported version of format.' + ' Get {} required {}' + .format(data['version_format'], RepoMapData.VERSION_FORMAT) + ) + + repomap = RepoMapData() + + # Load repositories + existing_pesids = set() + for repo_family in data['repositories']: + existing_pesids.add(repo_family['pesid']) + for repo in repo_family['entries']: + repomap.add_repository(repo, repo_family['pesid']) + + # Load mappings + for mapping in data['mapping']: + for entry in mapping['entries']: + if not isinstance(entry['target'], list): + raise ValueError( + 'The target field of a mapping entry is not a list: {}' + .format(entry) + ) + + for pesid in [entry['source']] + entry['target']: + if pesid not in existing_pesids: + raise ValueError( + 'The {} pesid is not related to any repository.' 
+ .format(pesid) + ) + repomap.add_mapping( + source_major_version=mapping['source_major_version'], + target_major_version=mapping['target_major_version'], + source_pesid=entry['source'], + target_pesid=entry['target'], + ) + return repomap From 7fbdecf3cf2da58a3b60432fd890bf25c920c840 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Tue, 16 Apr 2024 15:35:10 +0400 Subject: [PATCH 03/14] Add the missing function to the new library --- repos/system_upgrade/common/libraries/repomaputils.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/repos/system_upgrade/common/libraries/repomaputils.py b/repos/system_upgrade/common/libraries/repomaputils.py index 01ee22c843..f37ad30936 100644 --- a/repos/system_upgrade/common/libraries/repomaputils.py +++ b/repos/system_upgrade/common/libraries/repomaputils.py @@ -1,4 +1,4 @@ -from collections import defaultdict +from collections import defaultdict from leapp.models import PESIDRepositoryEntry, RepoMapEntry @@ -35,6 +35,12 @@ def get_repositories(self, valid_major_versions): """ return [repo for repo in self.repositories if repo.major_version in valid_major_versions] + def get_version_repoids(self, major_version): + """ + Return the list of repository ID strings for repositories matching the specified major version. 
+ """ + return [repo.repoid for repo in self.repositories if repo.major_version == major_version] + def add_mapping(self, source_major_version, target_major_version, source_pesid, target_pesid): """ Add a new mapping entry that is mapping the source pesid to the destination pesid(s), From 51af210dd463691b4f09036a94131918f308bd30 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Wed, 24 Apr 2024 03:26:48 +0400 Subject: [PATCH 04/14] Add multiple repomap support to PES Events Scanner --- .../libraries/pes_events_scanner.py | 9 +++--- .../libraries/setuptargetrepos.py | 30 ++++++------------- .../common/libraries/repomaputils.py | 22 ++++++++++++-- 3 files changed, 33 insertions(+), 28 deletions(-) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py index c737f88060..a756b81574 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -7,6 +7,7 @@ from leapp.libraries.actor import peseventsscanner_repomap from leapp.libraries.actor.pes_event_parsing import Action, get_pes_events, Package from leapp.libraries.common.config import version +from leapp.libraries.common.repomaputils import combine_repomap_messages from leapp.libraries.stdlib import api from leapp.libraries.stdlib.config import is_verbose from leapp.models import ( @@ -348,15 +349,13 @@ def get_pesid_to_repoid_map(target_pesids): :return: Dictionary mapping the target_pesids to their corresponding repoid """ - repositories_map_msgs = api.consume(RepositoriesMapping) - repositories_map_msg = next(repositories_map_msgs, None) - if list(repositories_map_msgs): - api.current_logger().warning('Unexpectedly received more than one RepositoriesMapping message.') - if not repositories_map_msg: + repositories_map_msgs = 
list(api.consume(RepositoriesMapping)) + if not repositories_map_msgs: raise StopActorExecutionError( 'Cannot parse RepositoriesMapping data properly', details={'Problem': 'Did not receive a message with mapped repositories'} ) + repositories_map_msg = combine_repomap_messages(repositories_map_msgs) rhui_info = next(api.consume(RHUIInfo), RHUIInfo(provider='')) diff --git a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py index 1029fc0331..827eb2628d 100644 --- a/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py +++ b/repos/system_upgrade/common/actors/setuptargetrepos/libraries/setuptargetrepos.py @@ -1,6 +1,7 @@ from leapp.libraries.actor import setuptargetrepos_repomap from leapp.libraries.common.config.version import get_source_major_version +from leapp.libraries.common.repomaputils import combine_repomap_messages from leapp.libraries.stdlib import api from leapp.models import ( CustomTargetRepository, @@ -67,26 +68,6 @@ def _get_used_repo_dict(): return used -def _combine_repomap_messages(mapping_list): - """ - Combine multiple repository mapping messages into one. - Needed because we might get more than one message if there are vendors present. - """ - combined_mapping = [] - combined_repositories = [] - # Depending on whether there are any vendors present, we might get more than one message. 
- for msg in mapping_list: - combined_mapping.extend(msg.mapping) - combined_repositories.extend(msg.repositories) - - combined_repomapping = RepositoriesMapping( - mapping=combined_mapping, - repositories=combined_repositories - ) - - return combined_repomapping - - def _get_mapped_repoids(repomap, src_repoids): mapped_repoids = set() src_maj_ver = get_source_major_version() @@ -149,9 +130,12 @@ def process(): vendor_repos = _get_vendor_custom_repos(enabled_repoids, repo_mapping_list) custom_repos.extend(vendor_repos) + api.current_logger().debug( + "Vendor repolist: {}".format([repo.repoid for repo in vendor_repos]) + ) # Setup repomap handler - repo_mappig_msg = _combine_repomap_messages(repo_mapping_list) + repo_mappig_msg = combine_repomap_messages(repo_mapping_list) rhui_info = next(api.consume(RHUIInfo), RHUIInfo(provider='')) repomap = setuptargetrepos_repomap.RepoMapDataHandler(repo_mappig_msg, cloud_provider=rhui_info.provider) @@ -207,6 +191,10 @@ def process(): custom_repos = [repo for repo in custom_repos if repo.repoid not in excluded_repoids] custom_repos = sorted(custom_repos, key=lambda x: x.repoid) + api.current_logger().debug( + "Final repolist: {}".format([repo.repoid for repo in custom_repos]) + ) + # produce message about skipped repositories enabled_repoids_with_mapping = _get_mapped_repoids(repomap, enabled_repoids) skipped_repoids = enabled_repoids & set(used_repoids_dict.keys()) - enabled_repoids_with_mapping diff --git a/repos/system_upgrade/common/libraries/repomaputils.py b/repos/system_upgrade/common/libraries/repomaputils.py index f37ad30936..8647390370 100644 --- a/repos/system_upgrade/common/libraries/repomaputils.py +++ b/repos/system_upgrade/common/libraries/repomaputils.py @@ -1,6 +1,5 @@ from collections import defaultdict -from leapp.models import PESIDRepositoryEntry, RepoMapEntry - +from leapp.models import PESIDRepositoryEntry, RepoMapEntry, RepositoriesMapping class RepoMapData(object): VERSION_FORMAT = '1.2.0' @@ -120,3 
+119,22 @@ def load_from_dict(data): target_pesid=entry['target'], ) return repomap + +def combine_repomap_messages(mapping_list): + """ + Combine multiple RepositoriesMapping messages into one. + Needed because we might get more than one message if there are vendors present. + """ + combined_mapping = [] + combined_repositories = [] + # Depending on whether there are any vendors present, we might get more than one message. + for msg in mapping_list: + combined_mapping.extend(msg.mapping) + combined_repositories.extend(msg.repositories) + + combined_repomapping = RepositoriesMapping( + mapping=combined_mapping, + repositories=combined_repositories + ) + + return combined_repomapping From 326040ffeab9f5c4f87d8a924fd720d808d60928 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Thu, 25 Apr 2024 04:21:48 +0400 Subject: [PATCH 05/14] Ensure the repositories data is available to the vendors-checking actors --- repos/system_upgrade/common/actors/systemfacts/actor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/system_upgrade/common/actors/systemfacts/actor.py b/repos/system_upgrade/common/actors/systemfacts/actor.py index 59b12c8705..85d4a09e29 100644 --- a/repos/system_upgrade/common/actors/systemfacts/actor.py +++ b/repos/system_upgrade/common/actors/systemfacts/actor.py @@ -47,7 +47,7 @@ class SystemFactsActor(Actor): GrubCfgBios, Report ) - tags = (IPUWorkflowTag, FactsPhaseTag,) + tags = (IPUWorkflowTag, FactsPhaseTag.Before,) def process(self): self.produce(systemfacts.get_sysctls_status()) From 8800a2e22aae9656fff0b3bde016be50f70395b4 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Thu, 25 Apr 2024 06:41:14 +0400 Subject: [PATCH 06/14] Add target directory to the PES fetch function --- .../actors/peseventsscanner/libraries/pes_event_parsing.py | 1 + repos/system_upgrade/common/libraries/fetch.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git 
a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py index 35bcec73f7..a9af4e9b2c 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py @@ -69,6 +69,7 @@ def get_pes_events(pes_json_directory, pes_json_filename): try: events_data = fetch.load_data_asset(api.current_actor(), pes_json_filename, + asset_directory=pes_json_directory, asset_fulltext_name='PES events file', docs_url='', docs_title='') diff --git a/repos/system_upgrade/common/libraries/fetch.py b/repos/system_upgrade/common/libraries/fetch.py index 1ca26170dd..3bfe12e8c6 100644 --- a/repos/system_upgrade/common/libraries/fetch.py +++ b/repos/system_upgrade/common/libraries/fetch.py @@ -142,7 +142,8 @@ def load_data_asset(actor_requesting_asset, asset_filename, asset_fulltext_name, docs_url, - docs_title): + docs_title, + asset_directory="/etc/leapp/files"): """ Load the content of the data asset with given asset_filename @@ -174,7 +175,7 @@ def load_data_asset(actor_requesting_asset, try: # The asset family ID has the form (major, minor), include only `major` in the URL - raw_asset_contents = read_or_fetch(asset_filename, data_stream=data_stream_major) + raw_asset_contents = read_or_fetch(asset_filename, directory=asset_directory, data_stream=data_stream_major) asset_contents = json.loads(raw_asset_contents) except ValueError: msg = 'The {0} file (at {1}) does not contain a valid JSON object.'.format(asset_fulltext_name, asset_filename) From 10d34de9ceb669ad31146b69010ac3f186717181 Mon Sep 17 00:00:00 2001 From: Monstrofil Date: Tue, 18 Jun 2024 11:30:53 +0300 Subject: [PATCH 07/14] Allow packageinfo to be explicitly empty list in pes files Raise errors only when packageinfo is missing, but allow empty lists which mean that no actions required. 
--- .../actors/peseventsscanner/libraries/pes_event_parsing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py index a9af4e9b2c..198aaab5ce 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py @@ -76,7 +76,7 @@ def get_pes_events(pes_json_directory, pes_json_filename): if not events_data: return None - if not events_data.get('packageinfo'): + if events_data.get('packageinfo') is None: raise ValueError('Found PES data with invalid structure') all_events = list(chain(*[parse_entry(entry) for entry in events_data['packageinfo']])) From db2f12c52720a6dcbb4693615c4f12a9a486a295 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Thu, 25 Apr 2024 06:41:20 +0400 Subject: [PATCH 08/14] Implement package reinstallation --- etc/leapp/transaction/to_reinstall | 3 ++ .../filterrpmtransactionevents/actor.py | 5 +++- .../rpmtransactionconfigtaskscollector.py | 28 +++++++++++++++---- .../common/files/rhel_upgrade.py | 4 +++ .../common/libraries/dnfplugin.py | 1 + .../common/models/rpmtransactiontasks.py | 1 + 6 files changed, 35 insertions(+), 7 deletions(-) create mode 100644 etc/leapp/transaction/to_reinstall diff --git a/etc/leapp/transaction/to_reinstall b/etc/leapp/transaction/to_reinstall new file mode 100644 index 0000000000..c6694a8e36 --- /dev/null +++ b/etc/leapp/transaction/to_reinstall @@ -0,0 +1,3 @@ +### List of packages (each on new line) to be reinstalled to the upgrade transaction +### Useful for packages that have identical version strings but contain binary changes between major OS versions +### Packages that aren't installed will be skipped diff --git a/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py 
b/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py index e0d89d9f18..52f93ef3f4 100644 --- a/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py +++ b/repos/system_upgrade/common/actors/filterrpmtransactionevents/actor.py @@ -32,6 +32,7 @@ def process(self): to_remove = set() to_keep = set() to_upgrade = set() + to_reinstall = set() modules_to_enable = {} modules_to_reset = {} for event in self.consume(RpmTransactionTasks, PESRpmTransactionTasks): @@ -39,13 +40,14 @@ def process(self): to_install.update(event.to_install) to_remove.update(installed_pkgs.intersection(event.to_remove)) to_keep.update(installed_pkgs.intersection(event.to_keep)) + to_reinstall.update(installed_pkgs.intersection(event.to_reinstall)) modules_to_enable.update({'{}:{}'.format(m.name, m.stream): m for m in event.modules_to_enable}) modules_to_reset.update({'{}:{}'.format(m.name, m.stream): m for m in event.modules_to_reset}) to_remove.difference_update(to_keep) # run upgrade for the rest of RH signed pkgs which we do not have rule for - to_upgrade = installed_pkgs - (to_install | to_remove) + to_upgrade = installed_pkgs - (to_install | to_remove | to_reinstall) self.produce(FilteredRpmTransactionTasks( local_rpms=list(local_rpms), @@ -53,5 +55,6 @@ def process(self): to_remove=list(to_remove), to_keep=list(to_keep), to_upgrade=list(to_upgrade), + to_reinstall=list(to_reinstall), modules_to_reset=list(modules_to_reset.values()), modules_to_enable=list(modules_to_enable.values()))) diff --git a/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py b/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py index fb6ae8ff44..70f07387ad 100644 --- a/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py +++ 
b/repos/system_upgrade/common/actors/rpmtransactionconfigtaskscollector/libraries/rpmtransactionconfigtaskscollector.py @@ -18,21 +18,37 @@ def load_tasks_file(path, logger): return [] +def filter_out(installed_rpm_names, to_filter, debug_msg): + # These are the packages that aren't installed on the system. + filtered_ok = [pkg for pkg in to_filter if pkg not in installed_rpm_names] + + # And these ones are the ones that are. + filtered_out = list(set(to_filter) - set(filtered_ok)) + if filtered_out: + api.current_logger().debug( + debug_msg + + '\n- ' + '\n- '.join(filtered_out) + ) + # We may want to use either of the two sets. + return filtered_ok, filtered_out + + def load_tasks(base_dir, logger): # Loads configuration files to_install, to_keep, and to_remove from the given base directory rpms = next(api.consume(InstalledRedHatSignedRPM)) rpm_names = [rpm.name for rpm in rpms.items] + to_install = load_tasks_file(os.path.join(base_dir, 'to_install'), logger) + install_debug_msg = 'The following packages from "to_install" file will be ignored as they are already installed:' # we do not want to put into rpm transaction what is already installed (it will go to "to_upgrade" bucket) - to_install_filtered = [pkg for pkg in to_install if pkg not in rpm_names] + to_install_filtered, _ = filter_out(rpm_names, to_install, install_debug_msg) - filtered = set(to_install) - set(to_install_filtered) - if filtered: - api.current_logger().debug( - 'The following packages from "to_install" file will be ignored as they are already installed:' - '\n- ' + '\n- '.join(filtered)) + to_reinstall = load_tasks_file(os.path.join(base_dir, 'to_reinstall'), logger) + reinstall_debug_msg = 'The following packages from "to_reinstall" file will be ignored as they are not installed:' + _, to_reinstall_filtered = filter_out(rpm_names, to_reinstall, reinstall_debug_msg) return RpmTransactionTasks( to_install=to_install_filtered, + to_reinstall=to_reinstall_filtered, 
to_keep=load_tasks_file(os.path.join(base_dir, 'to_keep'), logger), to_remove=load_tasks_file(os.path.join(base_dir, 'to_remove'), logger)) diff --git a/repos/system_upgrade/common/files/rhel_upgrade.py b/repos/system_upgrade/common/files/rhel_upgrade.py index 34f7b8f918..acba532ca5 100644 --- a/repos/system_upgrade/common/files/rhel_upgrade.py +++ b/repos/system_upgrade/common/files/rhel_upgrade.py @@ -184,6 +184,7 @@ def run(self): to_install = self.plugin_data['pkgs_info']['to_install'] to_remove = self.plugin_data['pkgs_info']['to_remove'] to_upgrade = self.plugin_data['pkgs_info']['to_upgrade'] + to_reinstall = self.plugin_data['pkgs_info']['to_reinstall'] # Modules to enable self._process_entities(entities=[available_modules_to_enable], @@ -196,6 +197,9 @@ def run(self): self._process_entities(entities=to_install, op=self.base.install, entity_name='Package') # Packages to be upgraded self._process_entities(entities=to_upgrade, op=self.base.upgrade, entity_name='Package') + # Packages to be reinstalled + self._process_entities(entities=to_reinstall, op=self.base.reinstall, entity_name='Package') + self.base.distro_sync() if self.opts.tid[0] == 'check': diff --git a/repos/system_upgrade/common/libraries/dnfplugin.py b/repos/system_upgrade/common/libraries/dnfplugin.py index ffde211fd9..8533fb04ff 100644 --- a/repos/system_upgrade/common/libraries/dnfplugin.py +++ b/repos/system_upgrade/common/libraries/dnfplugin.py @@ -92,6 +92,7 @@ def build_plugin_data(target_repoids, debug, test, tasks, on_aws): 'to_install': tasks.to_install, 'to_remove': tasks.to_remove, 'to_upgrade': tasks.to_upgrade, + 'to_reinstall': tasks.to_reinstall, 'modules_to_enable': ['{}:{}'.format(m.name, m.stream) for m in tasks.modules_to_enable], }, 'dnf_conf': { diff --git a/repos/system_upgrade/common/models/rpmtransactiontasks.py b/repos/system_upgrade/common/models/rpmtransactiontasks.py index 7e2870d08e..05d4e94197 100644 --- a/repos/system_upgrade/common/models/rpmtransactiontasks.py 
+++ b/repos/system_upgrade/common/models/rpmtransactiontasks.py @@ -10,6 +10,7 @@ class RpmTransactionTasks(Model): to_keep = fields.List(fields.String(), default=[]) to_remove = fields.List(fields.String(), default=[]) to_upgrade = fields.List(fields.String(), default=[]) + to_reinstall = fields.List(fields.String(), default=[]) modules_to_enable = fields.List(fields.Model(Module), default=[]) modules_to_reset = fields.List(fields.Model(Module), default=[]) From b70f0e7f9369bfad9cc4e5022f24c5f19972a88e Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Thu, 25 Apr 2024 06:41:20 +0400 Subject: [PATCH 09/14] Add the missing reinstallation ID --- .../actors/peseventsscanner/libraries/pes_event_parsing.py | 1 + 1 file changed, 1 insertion(+) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py index 198aaab5ce..0a5f46e038 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_event_parsing.py @@ -58,6 +58,7 @@ class Action(IntEnum): MERGED = 5 MOVED = 6 RENAMED = 7 + REINSTALLED = 8 def get_pes_events(pes_json_directory, pes_json_filename): From b495b89e4631b964634776f2ff0477cb76076fb1 Mon Sep 17 00:00:00 2001 From: Roman Prilipskii Date: Thu, 25 Apr 2024 07:49:18 +0400 Subject: [PATCH 10/14] TODO: Reinstallation --- .../libraries/pes_events_scanner.py | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py index a756b81574..00ab58922c 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -133,6 
+133,7 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, logger = api.current_logger() # Start with the installed packages and modify the set according to release events target_pkgs = set(source_installed_pkgs) + pkgs_to_reinstall = set() release_events = [e for e in events if e.to_release == release] @@ -169,9 +170,12 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs, target_pkgs = target_pkgs.difference(event.in_pkgs) target_pkgs = target_pkgs.union(event.out_pkgs) + if (event.action == Action.REINSTALLED and is_any_in_pkg_present): + pkgs_to_reinstall = pkgs_to_reinstall.union(event.in_pkgs) + pkgs_to_demodularize = pkgs_to_demodularize.difference(event.in_pkgs) - return (target_pkgs, pkgs_to_demodularize) + return (target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall) def remove_undesired_events(events, relevant_to_releases): @@ -237,15 +241,18 @@ def compute_packages_on_target_system(source_pkgs, events, releases): did_processing_cross_major_version = True pkgs_to_demodularize = {pkg for pkg in target_pkgs if pkg.modulestream} - target_pkgs, pkgs_to_demodularize = compute_pkg_changes_between_consequent_releases(target_pkgs, events, - release, seen_pkgs, - pkgs_to_demodularize) + target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_pkg_changes_between_consequent_releases + ( + target_pkgs, events, + release, seen_pkgs, + pkgs_to_demodularize + ) seen_pkgs = seen_pkgs.union(target_pkgs) demodularized_pkgs = {Package(pkg.name, pkg.repository, None) for pkg in pkgs_to_demodularize} demodularized_target_pkgs = target_pkgs.difference(pkgs_to_demodularize).union(demodularized_pkgs) - return (demodularized_target_pkgs, pkgs_to_demodularize) + return (demodularized_target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall) def compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize): @@ -511,7 +518,8 @@ def process(): events = remove_undesired_events(events, releases) # Apply events 
- compute what packages should the target system have - target_pkgs, pkgs_to_demodularize = compute_packages_on_target_system(source_pkgs, events, releases) + # TODO: bring back the reinstallation of packages + target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_packages_on_target_system(source_pkgs, events, releases) # Packages coming out of the events have PESID as their repository, however, we need real repoid target_pkgs = replace_pesids_with_repoids_in_packages(target_pkgs, repoids_of_source_pkgs) @@ -527,4 +535,5 @@ def process(): # Compare the packages on source system and the computed packages on target system and determine what to install rpm_tasks = compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize) if rpm_tasks: + rpm_tasks.to_reinstall = pkgs_to_reinstall api.produce(rpm_tasks) From 06eaee5cffdda0a05145db73f177ef5eb4e813c3 Mon Sep 17 00:00:00 2001 From: Oleksandr Date: Tue, 18 Jun 2024 13:32:41 +0300 Subject: [PATCH 11/14] Bring back the package reinstallation --- .../actors/peseventsscanner/libraries/pes_events_scanner.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py index 00ab58922c..3abbf49581 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -35,7 +35,7 @@ VENDORS_DIR = "/etc/leapp/files/vendors.d" -TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep')) +TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep', 'to_reinstall')) def get_cloud_provider_name(cloud_provider_variant): @@ -86,7 +86,7 @@ def get_transaction_configuration(): These configuration files have higher priority 
than PES data. :return: RpmTransactionTasks model instance """ - transaction_configuration = TransactionConfiguration(to_install=[], to_remove=[], to_keep=[]) + transaction_configuration = TransactionConfiguration(to_install=[], to_remove=[], to_keep=[], to_reinstall=[]) _Pkg = partial(Package, repository=None, modulestream=None) @@ -94,6 +94,7 @@ def get_transaction_configuration(): transaction_configuration.to_install.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_install) transaction_configuration.to_remove.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_remove) transaction_configuration.to_keep.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_keep) + transaction_configuration.to_reinstall.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_reinstall) return transaction_configuration @@ -518,7 +519,6 @@ def process(): events = remove_undesired_events(events, releases) # Apply events - compute what packages should the target system have - # TODO: bring back the reinstallation of packages target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_packages_on_target_system(source_pkgs, events, releases) # Packages coming out of the events have PESID as their repository, however, we need real repoid From 9885a8f315b75a808da41f00a4dafc554a885aee Mon Sep 17 00:00:00 2001 From: Oleksandr Date: Tue, 18 Jun 2024 14:44:07 +0300 Subject: [PATCH 12/14] Fix type errors in in code --- .../actors/peseventsscanner/libraries/pes_events_scanner.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py index 3abbf49581..5c8ada44d9 100644 --- a/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py +++ b/repos/system_upgrade/common/actors/peseventsscanner/libraries/pes_events_scanner.py @@ -242,8 +242,7 @@ def 
compute_packages_on_target_system(source_pkgs, events, releases): did_processing_cross_major_version = True pkgs_to_demodularize = {pkg for pkg in target_pkgs if pkg.modulestream} - target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_pkg_changes_between_consequent_releases - ( + target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_pkg_changes_between_consequent_releases( target_pkgs, events, release, seen_pkgs, pkgs_to_demodularize @@ -535,5 +534,5 @@ def process(): # Compare the packages on source system and the computed packages on target system and determine what to install rpm_tasks = compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize) if rpm_tasks: - rpm_tasks.to_reinstall = pkgs_to_reinstall + rpm_tasks.to_reinstall = sorted(pkgs_to_reinstall) api.produce(rpm_tasks) From e5c90a4b98f53fe45e398673f1b5ecf0c052337d Mon Sep 17 00:00:00 2001 From: Oleksandr Shyshatskyi Date: Wed, 19 Jun 2024 11:33:02 +0300 Subject: [PATCH 13/14] Add additional trusted gpg certificates directory --- .../libraries/missinggpgkey.py | 41 +++++++++++-------- 1 file changed, 23 insertions(+), 18 deletions(-) diff --git a/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py b/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py index 1880986dcf..d6f64eea44 100644 --- a/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py +++ b/repos/system_upgrade/common/actors/missinggpgkeysinhibitor/libraries/missinggpgkey.py @@ -112,7 +112,10 @@ def _get_path_to_gpg_certs(): # only beta is special in regards to the GPG signing keys if target_product_type == 'beta': certs_dir = '{}beta'.format(target_major_version) - return os.path.join(api.get_common_folder_path(GPG_CERTS_FOLDER), certs_dir) + return [ + "/etc/leapp/files/vendors.d/rpm-gpg/", + os.path.join(api.get_common_folder_path(GPG_CERTS_FOLDER), certs_dir) + ] def _expand_vars(path): @@ -169,14 
+172,15 @@ def _get_pubkeys(installed_rpms): """ pubkeys = _pubkeys_from_rpms(installed_rpms) certs_path = _get_path_to_gpg_certs() - for certname in os.listdir(certs_path): - key_file = os.path.join(certs_path, certname) - fps = _read_gpg_fp_from_file(key_file) - if fps: - pubkeys += fps - # TODO: what about else: ? - # The warning is now logged in _read_gpg_fp_from_file. We can raise - # the priority of the message or convert it to report though. + for trusted_dir in certs_path: + for certname in os.listdir(trusted_dir): + key_file = os.path.join(trusted_dir, certname) + fps = _read_gpg_fp_from_file(key_file) + if fps: + pubkeys += fps + # TODO: what about else: ? + # The warning is now logged in _read_gpg_fp_from_file. We can raise + # the priority of the message or convert it to report though. return pubkeys @@ -270,11 +274,11 @@ def _report(title, summary, keys, inhibitor=False): ) hint = ( 'Check the path to the listed GPG keys is correct, the keys are valid and' - ' import them into the host RPM DB or store them inside the {} directory' + ' import them into the host RPM DB or store them inside one of the {} directories' ' prior the upgrade.' ' If you want to proceed the in-place upgrade without checking any RPM' ' signatures, execute leapp with the `--nogpgcheck` option.' - .format(_get_path_to_gpg_certs()) + .format(','.join(_get_path_to_gpg_certs())) ) groups = [reporting.Groups.REPOSITORY] if inhibitor: @@ -305,8 +309,8 @@ def _report_missing_keys(keys): summary = ( 'Some of the target repositories require GPG keys that are not installed' - ' in the current RPM DB or are not stored in the {trust_dir} directory.' - .format(trust_dir=_get_path_to_gpg_certs()) + ' in the current RPM DB or are not stored in the {trust_dir} directories.' 
+ .format(trust_dir=','.join(_get_path_to_gpg_certs())) ) _report('Detected unknown GPG keys for target system repositories', summary, keys, True) @@ -380,11 +384,12 @@ def _report_repos_missing_keys(repos): def register_dnfworkaround(): - api.produce(DNFWorkaround( - display_name='import trusted gpg keys to RPM DB', - script_path=api.current_actor().get_common_tool_path('importrpmgpgkeys'), - script_args=[_get_path_to_gpg_certs()], - )) + for trust_certs_dir in _get_path_to_gpg_certs(): + api.produce(DNFWorkaround( + display_name='import trusted gpg keys to RPM DB', + script_path=api.current_actor().get_common_tool_path('importrpmgpgkeys'), + script_args=[trust_certs_dir], + )) @suppress_deprecation(TMPTargetRepositoriesFacts) From 5e54252c8e0cee410eeb0e4c57f20917a5b54109 Mon Sep 17 00:00:00 2001 From: Monstrofil Date: Wed, 19 Jun 2024 17:01:55 +0300 Subject: [PATCH 14/14] Added missing signatures --- .../common/actors/redhatsignedrpmscanner/actor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py b/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py index dbb3e66668..36daf322bc 100644 --- a/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py +++ b/repos/system_upgrade/common/actors/redhatsignedrpmscanner/actor.py @@ -15,8 +15,10 @@ '4eb84e71f2ee9d55', 'a963bbdbf533f4fa', '6c7cb6ef305d49d6'], + 'cloudlinux': ['8c55a6628608cb71'], 'almalinux': ['51d6647ec21ad6ea', - 'd36cb86cb86b3716'], + 'd36cb86cb86b3716', + '2ae81e8aced7258b'], 'rocky': ['15af5dac6d745a60', '702d426d350d275d'], 'ol': ['72f97b74ec551f03',