AlmaLinux vendors.d functionality rebased on top of v0.19 #114

Merged

Changes from all commits
1 change: 1 addition & 0 deletions .gitignore
@@ -115,6 +115,7 @@ ENV/

# visual studio code configuration
.vscode
*.code-workspace

# pycharm
.idea
3 changes: 3 additions & 0 deletions etc/leapp/transaction/to_reinstall
@@ -0,0 +1,3 @@
### List of packages (each on a new line) to be reinstalled in the upgrade transaction
### Useful for packages that have identical version strings but contain binary changes between major OS versions
### Packages that aren't installed will be skipped
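The new file follows the same one-package-per-line format as the existing /etc/leapp/transaction/to_install, to_keep and to_remove configuration files: lines starting with `#` are comments, everything else is a package name. A minimal sketch of how such a file could be read (the helper name and the example package are illustrative, not part of this change):

```python
# Hypothetical sketch: read a leapp transaction configuration file such as
# /etc/leapp/transaction/to_reinstall into a list of package names.
# Comment lines (starting with '#') and blank lines are ignored.
def read_transaction_list(path):
    packages = []
    with open(path) as config_file:
        for line in config_file:
            line = line.strip()
            if line and not line.startswith('#'):
                packages.append(line)
    return packages

# A file containing the single entry "openssl-libs" would yield ['openssl-libs'],
# and that package would then be reinstalled during the upgrade transaction.
```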
@@ -0,0 +1,53 @@
from leapp.actors import Actor
from leapp.libraries.stdlib import api
from leapp.models import (
    RepositoriesFacts,
    VendorSourceRepos,
    ActiveVendorList,
)
from leapp.tags import FactsPhaseTag, IPUWorkflowTag


class CheckEnabledVendorRepos(Actor):
    """
    Create a list of vendors whose repositories are present on the system and enabled.
    Only those vendors' configurations (new repositories, PES actions, etc.)
    will be included in the upgrade process.
    """

    name = "check_enabled_vendor_repos"
    consumes = (RepositoriesFacts, VendorSourceRepos)
    produces = (ActiveVendorList,)
    tags = (IPUWorkflowTag, FactsPhaseTag.Before)

    def process(self):
        vendor_mapping_data = {}
        active_vendors = set()

        # Make a dict for easy mapping of repoid -> corresponding vendor name.
        for vendor_src_repodata in api.consume(VendorSourceRepos):
            for vendor_src_repo in vendor_src_repodata.source_repoids:
                vendor_mapping_data[vendor_src_repo] = vendor_src_repodata.vendor

        # Is the repo listed in the vendor map as from_repoid present on the system?
        for repos_facts in api.consume(RepositoriesFacts):
            for repo_file in repos_facts.repositories:
                for repo_data in repo_file.data:
                    self.log.debug(
                        "Looking for repository {} in vendor maps".format(repo_data.repoid)
                    )
                    if repo_data.enabled and repo_data.repoid in vendor_mapping_data:
                        # If the vendor's repository is present in the system and enabled, count the vendor as active.
                        new_vendor = vendor_mapping_data[repo_data.repoid]
                        self.log.debug(
                            "Repository {} found and enabled, enabling vendor {}".format(
                                repo_data.repoid, new_vendor
                            )
                        )
                        active_vendors.add(new_vendor)

        if active_vendors:
            self.log.debug("Active vendor list: {}".format(active_vendors))
            api.produce(ActiveVendorList(data=list(active_vendors)))
        else:
            self.log.info("No active vendors found, vendor list not generated")
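To illustrate the mapping the actor builds, here is a minimal sketch with made-up vendor and repository IDs (the data is shaped like the consumed VendorSourceRepos and RepositoriesFacts messages, but the values are hypothetical):

```python
# Hypothetical data shaped like the VendorSourceRepos and RepositoriesFacts inputs.
vendor_source_repos = [
    {"vendor": "somevendor", "source_repoids": ["somevendor-base", "somevendor-extras"]},
]
system_repos = [
    {"repoid": "somevendor-base", "enabled": True},
    {"repoid": "another-repo", "enabled": True},
]

# repoid -> vendor map, as built in process().
vendor_mapping_data = {}
for entry in vendor_source_repos:
    for repoid in entry["source_repoids"]:
        vendor_mapping_data[repoid] = entry["vendor"]

# Any enabled system repository found in the map marks its vendor as active.
active_vendors = {
    vendor_mapping_data[repo["repoid"]]
    for repo in system_repos
    if repo["enabled"] and repo["repoid"] in vendor_mapping_data
}
print(active_vendors)  # {'somevendor'}
```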
@@ -32,26 +32,29 @@ def process(self):
to_remove = set()
to_keep = set()
to_upgrade = set()
to_reinstall = set()
modules_to_enable = {}
modules_to_reset = {}
for event in self.consume(RpmTransactionTasks, PESRpmTransactionTasks):
local_rpms.update(event.local_rpms)
to_install.update(event.to_install)
to_remove.update(installed_pkgs.intersection(event.to_remove))
to_keep.update(installed_pkgs.intersection(event.to_keep))
to_reinstall.update(installed_pkgs.intersection(event.to_reinstall))
modules_to_enable.update({'{}:{}'.format(m.name, m.stream): m for m in event.modules_to_enable})
modules_to_reset.update({'{}:{}'.format(m.name, m.stream): m for m in event.modules_to_reset})

to_remove.difference_update(to_keep)

# run upgrade for the rest of RH signed pkgs which we do not have rule for
to_upgrade = installed_pkgs - (to_install | to_remove)
to_upgrade = installed_pkgs - (to_install | to_remove | to_reinstall)

self.produce(FilteredRpmTransactionTasks(
local_rpms=list(local_rpms),
to_install=list(to_install),
to_remove=list(to_remove),
to_keep=list(to_keep),
to_upgrade=list(to_upgrade),
to_reinstall=list(to_reinstall),
modules_to_reset=list(modules_to_reset.values()),
modules_to_enable=list(modules_to_enable.values())))
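The adjusted to_upgrade computation simply excludes packages marked for reinstallation from the implicit upgrade set, so they are not handled twice. A small worked example of the set arithmetic (package names are made up):

```python
# Worked example of the to_upgrade computation above, with made-up package names.
installed_pkgs = {"bash", "openssl-libs", "old-tool"}
to_install = {"new-tool"}
to_remove = {"old-tool"}
to_reinstall = {"openssl-libs"}

# Packages with no explicit rule are upgraded; reinstalled ones are now excluded.
to_upgrade = installed_pkgs - (to_install | to_remove | to_reinstall)
print(to_upgrade)  # {'bash'}
```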
@@ -112,7 +112,10 @@ def _get_path_to_gpg_certs():
# only beta is special in regards to the GPG signing keys
if target_product_type == 'beta':
certs_dir = '{}beta'.format(target_major_version)
return os.path.join(api.get_common_folder_path(GPG_CERTS_FOLDER), certs_dir)
return [
"/etc/leapp/files/vendors.d/rpm-gpg/",
os.path.join(api.get_common_folder_path(GPG_CERTS_FOLDER), certs_dir)
]


def _expand_vars(path):
@@ -169,14 +172,15 @@ def _get_pubkeys(installed_rpms):
"""
pubkeys = _pubkeys_from_rpms(installed_rpms)
certs_path = _get_path_to_gpg_certs()
for certname in os.listdir(certs_path):
key_file = os.path.join(certs_path, certname)
fps = _read_gpg_fp_from_file(key_file)
if fps:
pubkeys += fps
# TODO: what about else: ?
# The warning is now logged in _read_gpg_fp_from_file. We can raise
# the priority of the message or convert it to report though.
for trusted_dir in certs_path:
for certname in os.listdir(trusted_dir):
key_file = os.path.join(trusted_dir, certname)
fps = _read_gpg_fp_from_file(key_file)
if fps:
pubkeys += fps
# TODO: what about else: ?
# The warning is now logged in _read_gpg_fp_from_file. We can raise
# the priority of the message or convert it to report though.
return pubkeys


@@ -270,11 +274,11 @@ def _report(title, summary, keys, inhibitor=False):
)
hint = (
'Check the path to the listed GPG keys is correct, the keys are valid and'
' import them into the host RPM DB or store them inside the {} directory'
' import them into the host RPM DB or store them inside one of the {} directories'
' prior the upgrade.'
' If you want to proceed the in-place upgrade without checking any RPM'
' signatures, execute leapp with the `--nogpgcheck` option.'
.format(_get_path_to_gpg_certs())
.format(','.join(_get_path_to_gpg_certs()))
)
groups = [reporting.Groups.REPOSITORY]
if inhibitor:
@@ -305,8 +309,8 @@ def _report_missing_keys(keys):
def _report_missing_keys(keys):
summary = (
'Some of the target repositories require GPG keys that are not installed'
' in the current RPM DB or are not stored in the {trust_dir} directory.'
.format(trust_dir=_get_path_to_gpg_certs())
' in the current RPM DB or are not stored in the {trust_dir} directories.'
.format(trust_dir=','.join(_get_path_to_gpg_certs()))
)
_report('Detected unknown GPG keys for target system repositories', summary, keys, True)

@@ -380,11 +384,12 @@ def _report_repos_missing_keys(repos):


def register_dnfworkaround():
api.produce(DNFWorkaround(
display_name='import trusted gpg keys to RPM DB',
script_path=api.current_actor().get_common_tool_path('importrpmgpgkeys'),
script_args=[_get_path_to_gpg_certs()],
))
for trust_certs_dir in _get_path_to_gpg_certs():
api.produce(DNFWorkaround(
display_name='import trusted gpg keys to RPM DB',
script_path=api.current_actor().get_common_tool_path('importrpmgpgkeys'),
script_args=[trust_certs_dir],
))


@suppress_deprecation(TMPTargetRepositoriesFacts)
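Since _get_path_to_gpg_certs() now returns a list of directories, every caller iterates over it. Note that os.listdir() raises FileNotFoundError for a directory that does not exist (for example if /etc/leapp/files/vendors.d/rpm-gpg/ is absent); a defensive variant could skip missing paths. The guard below is a suggestion, not part of this change:

```python
import os

# Sketch: collect GPG key files from several trusted directories, skipping
# any directory that does not exist (hypothetical guard, not in the diff).
def iter_trusted_key_files(cert_dirs):
    for trusted_dir in cert_dirs:
        if not os.path.isdir(trusted_dir):
            continue
        for certname in os.listdir(trusted_dir):
            yield os.path.join(trusted_dir, certname)

for key_file in iter_trusted_key_files(["/etc/leapp/files/vendors.d/rpm-gpg/"]):
    print(key_file)
```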
4 changes: 3 additions & 1 deletion repos/system_upgrade/common/actors/peseventsscanner/actor.py
@@ -10,7 +10,8 @@
RepositoriesMapping,
RepositoriesSetupTasks,
RHUIInfo,
RpmTransactionTasks
RpmTransactionTasks,
ActiveVendorList,
)
from leapp.reporting import Report
from leapp.tags import FactsPhaseTag, IPUWorkflowTag
@@ -33,6 +34,7 @@ class PesEventsScanner(Actor):
RepositoriesMapping,
RHUIInfo,
RpmTransactionTasks,
ActiveVendorList,
)
produces = (ConsumedDataAsset, PESRpmTransactionTasks, RepositoriesSetupTasks, Report)
tags = (IPUWorkflowTag, FactsPhaseTag)
@@ -58,6 +58,7 @@ class Action(IntEnum):
MERGED = 5
MOVED = 6
RENAMED = 7
REINSTALLED = 8


def get_pes_events(pes_json_directory, pes_json_filename):
@@ -69,13 +70,14 @@ def get_pes_events(pes_json_directory, pes_json_filename):
try:
events_data = fetch.load_data_asset(api.current_actor(),
pes_json_filename,
asset_directory=pes_json_directory,
asset_fulltext_name='PES events file',
docs_url='',
docs_title='')
if not events_data:
return None

if not events_data.get('packageinfo'):
if events_data.get('packageinfo') is None:
raise ValueError('Found PES data with invalid structure')

all_events = list(chain(*[parse_entry(entry) for entry in events_data['packageinfo']]))
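The relaxed validity check matters for vendor PES files: a packageinfo list that is present but empty is now accepted and simply produces no events, while only a missing key is rejected. A quick illustration of the difference:

```python
# 'packageinfo' missing entirely -> invalid structure, raises ValueError.
events_data = {}
print(events_data.get('packageinfo') is None)   # True  -> rejected

# 'packageinfo' present but empty -> valid, just produces no events.
events_data = {'packageinfo': []}
print(events_data.get('packageinfo') is None)   # False -> accepted
print(not events_data.get('packageinfo'))       # True  -> the old check would have rejected it
```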
@@ -1,11 +1,13 @@
from collections import defaultdict, namedtuple
from functools import partial
import os

from leapp import reporting
from leapp.exceptions import StopActorExecutionError
from leapp.libraries.actor import peseventsscanner_repomap
from leapp.libraries.actor.pes_event_parsing import Action, get_pes_events, Package
from leapp.libraries.common.config import version
from leapp.libraries.common.repomaputils import combine_repomap_messages
from leapp.libraries.stdlib import api
from leapp.libraries.stdlib.config import is_verbose
from leapp.models import (
@@ -19,7 +21,8 @@
RepositoriesMapping,
RepositoriesSetupTasks,
RHUIInfo,
RpmTransactionTasks
RpmTransactionTasks,
ActiveVendorList,
)

SKIPPED_PKGS_MSG = (
@@ -30,8 +33,9 @@
'for details.\nThe list of these packages:'
)

VENDORS_DIR = "/etc/leapp/files/vendors.d"

TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep'))
TransactionConfiguration = namedtuple('TransactionConfiguration', ('to_install', 'to_remove', 'to_keep', 'to_reinstall'))


def get_cloud_provider_name(cloud_provider_variant):
@@ -82,14 +86,15 @@ def get_transaction_configuration():
These configuration files have higher priority than PES data.
:return: RpmTransactionTasks model instance
"""
transaction_configuration = TransactionConfiguration(to_install=[], to_remove=[], to_keep=[])
transaction_configuration = TransactionConfiguration(to_install=[], to_remove=[], to_keep=[], to_reinstall=[])

_Pkg = partial(Package, repository=None, modulestream=None)

for tasks in api.consume(RpmTransactionTasks):
transaction_configuration.to_install.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_install)
transaction_configuration.to_remove.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_remove)
transaction_configuration.to_keep.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_keep)
transaction_configuration.to_reinstall.extend(_Pkg(name=pkg_name) for pkg_name in tasks.to_reinstall)
return transaction_configuration


@@ -129,6 +134,7 @@ def compute_pkg_changes_between_consequent_releases(source_installed_pkgs,
logger = api.current_logger()
# Start with the installed packages and modify the set according to release events
target_pkgs = set(source_installed_pkgs)
pkgs_to_reinstall = set()

release_events = [e for e in events if e.to_release == release]

@@ -165,9 +171,12 @@
target_pkgs = target_pkgs.difference(event.in_pkgs)
target_pkgs = target_pkgs.union(event.out_pkgs)

if (event.action == Action.REINSTALLED and is_any_in_pkg_present):
pkgs_to_reinstall = pkgs_to_reinstall.union(event.in_pkgs)

pkgs_to_demodularize = pkgs_to_demodularize.difference(event.in_pkgs)

return (target_pkgs, pkgs_to_demodularize)
return (target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall)


def remove_undesired_events(events, relevant_to_releases):
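The new REINSTALLED branch in the hunk above only records the event's in_pkgs so they can later be added to the transaction's to_reinstall list when at least one of them is installed. A toy illustration of that accumulation (packages are simplified to plain strings here; the real code uses the Package model, and the names are made up):

```python
# Toy illustration of how REINSTALLED events accumulate packages.
class Action:
    REINSTALLED = 8

class Event:
    def __init__(self, action, in_pkgs):
        self.action = action
        self.in_pkgs = in_pkgs

installed = {"grub2-common", "bash"}
events = [Event(Action.REINSTALLED, {"grub2-common"})]

pkgs_to_reinstall = set()
for event in events:
    is_any_in_pkg_present = bool(installed.intersection(event.in_pkgs))
    if event.action == Action.REINSTALLED and is_any_in_pkg_present:
        pkgs_to_reinstall |= event.in_pkgs

print(pkgs_to_reinstall)  # {'grub2-common'}
```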
@@ -233,15 +242,17 @@ def compute_packages_on_target_system(source_pkgs, events, releases):
did_processing_cross_major_version = True
pkgs_to_demodularize = {pkg for pkg in target_pkgs if pkg.modulestream}

target_pkgs, pkgs_to_demodularize = compute_pkg_changes_between_consequent_releases(target_pkgs, events,
release, seen_pkgs,
pkgs_to_demodularize)
target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_pkg_changes_between_consequent_releases(
target_pkgs, events,
release, seen_pkgs,
pkgs_to_demodularize
)
seen_pkgs = seen_pkgs.union(target_pkgs)

demodularized_pkgs = {Package(pkg.name, pkg.repository, None) for pkg in pkgs_to_demodularize}
demodularized_target_pkgs = target_pkgs.difference(pkgs_to_demodularize).union(demodularized_pkgs)

return (demodularized_target_pkgs, pkgs_to_demodularize)
return (demodularized_target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall)


def compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize):
@@ -345,15 +356,13 @@ def get_pesid_to_repoid_map(target_pesids):
:return: Dictionary mapping the target_pesids to their corresponding repoid
"""

repositories_map_msgs = api.consume(RepositoriesMapping)
repositories_map_msg = next(repositories_map_msgs, None)
if list(repositories_map_msgs):
api.current_logger().warning('Unexpectedly received more than one RepositoriesMapping message.')
if not repositories_map_msg:
repositories_map_msgs = list(api.consume(RepositoriesMapping))
if not repositories_map_msgs:
raise StopActorExecutionError(
'Cannot parse RepositoriesMapping data properly',
details={'Problem': 'Did not receive a message with mapped repositories'}
)
repositories_map_msg = combine_repomap_messages(repositories_map_msgs)

rhui_info = next(api.consume(RHUIInfo), RHUIInfo(provider=''))

@@ -485,6 +494,19 @@ def process():
if not events:
return

active_vendors = []
for vendor_list in api.consume(ActiveVendorList):
active_vendors.extend(vendor_list.data)

pes_json_suffix = "_pes.json"
if os.path.isdir(VENDORS_DIR):
vendor_pesfiles = list(filter(lambda vfile: pes_json_suffix in vfile, os.listdir(VENDORS_DIR)))

for pesfile in vendor_pesfiles:
if pesfile[:-len(pes_json_suffix)] in active_vendors:
vendor_events = get_pes_events(VENDORS_DIR, pesfile)
events.extend(vendor_events)

releases = get_relevant_releases(events)
source_pkgs = get_installed_pkgs()
source_pkgs = apply_transaction_configuration(source_pkgs)
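Vendor PES files are matched to active vendors by filename: a file named <vendor>_pes.json in /etc/leapp/files/vendors.d is loaded only when <vendor> appears in ActiveVendorList. A small sketch of the name matching used above (the vendor names are examples only):

```python
# Sketch of the filename-to-vendor matching, with example names only.
pes_json_suffix = "_pes.json"
active_vendors = ["somevendor"]
files_in_vendors_dir = ["somevendor_pes.json", "othervendor_pes.json", "somevendor_map.json"]

vendor_pesfiles = [f for f in files_in_vendors_dir if pes_json_suffix in f]
to_load = [f for f in vendor_pesfiles if f[:-len(pes_json_suffix)] in active_vendors]
print(to_load)  # ['somevendor_pes.json']
```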
@@ -496,7 +518,7 @@
events = remove_undesired_events(events, releases)

# Apply events - compute what packages should the target system have
target_pkgs, pkgs_to_demodularize = compute_packages_on_target_system(source_pkgs, events, releases)
target_pkgs, pkgs_to_demodularize, pkgs_to_reinstall = compute_packages_on_target_system(source_pkgs, events, releases)

# Packages coming out of the events have PESID as their repository, however, we need real repoid
target_pkgs = replace_pesids_with_repoids_in_packages(target_pkgs, repoids_of_source_pkgs)
@@ -512,4 +534,5 @@
# Compare the packages on source system and the computed packages on target system and determine what to install
rpm_tasks = compute_rpm_tasks_from_pkg_set_diff(source_pkgs, target_pkgs, pkgs_to_demodularize)
if rpm_tasks:
rpm_tasks.to_reinstall = sorted(pkgs_to_reinstall)
api.produce(rpm_tasks)