diff --git a/doozerlib/assembly.py b/doozerlib/assembly.py
index 2551838fd..596977fdf 100644
--- a/doozerlib/assembly.py
+++ b/doozerlib/assembly.py
@@ -3,7 +3,7 @@ from enum import Enum
 
-from doozerlib.model import Missing, Model
+from doozerlib.model import Missing, Model, ListModel
 
 
 class AssemblyTypes(Enum):
@@ -13,6 +13,37 @@ class AssemblyTypes(Enum):
     CUSTOM = "custom"  # No constraints enforced
 
 
+class AssemblyIssueCode(Enum):
+    IMPERMISSIBLE = 0
+    CONFLICTING_INHERITED_DEPENDENCY = 1
+    CONFLICTING_GROUP_RPM_INSTALLED = 2
+    MISMATCHED_SIBLINGS = 3
+    OUTDATED_RPMS_IN_STREAM_BUILD = 4
+    INCONSISTENT_RHCOS_RPMS = 5
+
+
+class AssemblyIssue:
+    """
+    An encapsulation of an issue with an assembly. Some issues are critical for any
+    assembly and some are only relevant for assemblies we intend for GA.
+    """
+    def __init__(self, msg: str, component: str, code: AssemblyIssueCode = AssemblyIssueCode.IMPERMISSIBLE):
+        """
+        :param msg: A human readable message describing the issue.
+        :param component: The name of the entity associated with the issue, if any.
+        :param code: A code that classifies the issue. Assembly definitions can optionally permit some of these codes.
+        """
+        self.msg: str = msg
+        self.component: str = component or 'general'
+        self.code = code
+
+    def __str__(self):
+        return self.msg
+
+    def __repr__(self):
+        return self.msg
+
+
 def merger(a, b):
     """
     Merges two, potentially deep, objects into a new one and returns the result.
@@ -153,23 +184,37 @@ def assembly_metadata_config(releases_config: Model, assembly: str, meta_type: s
     return Model(dict_to_model=config_dict)
 
 
-def assembly_rhcos_config(releases_config: Model, assembly: str) -> Model:
+def _assembly_config_struct(releases_config: Model, assembly: str, key: str, default):
     """
-    :param releases_config: The content of releases.yml in Model form.
-    :param assembly: The name of the assembly to assess
-    Returns the a computed rhcos config model for a given assembly.
+    If a key is directly under the 'assembly' (e.g. rhcos), then this method will
+    recurse the inheritance tree to build you a final version of that key's value.
+    The key may refer to a list or dict (set default value appropriately).
     """
     if not assembly or not isinstance(releases_config, Model):
         return Missing
 
     _check_recursion(releases_config, assembly)
     target_assembly = releases_config.releases[assembly].assembly
-    rhcos_config_dict = target_assembly.get("rhcos", {})
+    key_struct = target_assembly.get(key, default)
     if target_assembly.basis.assembly:  # Does this assembly inherit from another?
         # Recursive apply ancestor assemblies
-        basis_rhcos_config = assembly_rhcos_config(releases_config, target_assembly.basis.assembly)
-        rhcos_config_dict = merger(rhcos_config_dict, basis_rhcos_config.primitive())
-    return Model(dict_to_model=rhcos_config_dict)
+        parent_config_struct = _assembly_config_struct(releases_config, target_assembly.basis.assembly, key, default)
+        key_struct = merger(key_struct, parent_config_struct.primitive())
+    if isinstance(default, dict):
+        return Model(dict_to_model=key_struct)
+    elif isinstance(default, list):
+        return ListModel(list_to_model=key_struct)
+    else:
+        raise ValueError(f'Unknown how to derive for default type: {type(default)}')
+
+
+def assembly_rhcos_config(releases_config: Model, assembly: str) -> Model:
+    """
+    :param releases_config: The content of releases.yml in Model form.
+    :param assembly: The name of the assembly to assess
+    Returns a computed rhcos config model for a given assembly.
+ """ + return _assembly_config_struct(releases_config, assembly, 'rhcos', {}) def assembly_basis_event(releases_config: Model, assembly: str) -> typing.Optional[int]: @@ -190,21 +235,33 @@ def assembly_basis_event(releases_config: Model, assembly: str) -> typing.Option return assembly_basis_event(releases_config, target_assembly.basis.assembly) -def assembly_basis(releases_config: Model, assembly: str) -> typing.Optional[Model]: +def assembly_basis(releases_config: Model, assembly: str) -> Model: """ :param releases_config: The content of releases.yml in Model form. :param assembly: The name of the assembly to assess Returns the basis dict for a given assembly. If the assembly has no basis, - None is returned. + Model({}) is returned. """ - if not assembly or not isinstance(releases_config, Model): - return None + return _assembly_config_struct(releases_config, assembly, 'basis', {}) - _check_recursion(releases_config, assembly) - target_assembly = releases_config.releases[assembly].assembly - target_basis_config_dict = target_assembly.get("basis", {}) - if target_assembly.basis.assembly: # Does this assembly inherit from another? - # Recursive apply ancestor assemblies - basis_basis_config = assembly_basis(releases_config, target_assembly.basis.assembly) - target_basis_config_dict = merger(target_basis_config_dict, basis_basis_config.primitive()) - return Model(dict_to_model=target_basis_config_dict) + +def assembly_permits(releases_config: Model, assembly: str) -> ListModel: + """ + :param releases_config: The content of releases.yml in Model form. + :param assembly: The name of the assembly to assess + Returns the a computed permits config model for a given assembly. If no + permits are defined ListModel([]) is returned. + """ + defined_permits = _assembly_config_struct(releases_config, assembly, 'permits', []) + + # Do some basic validation here to fail fast + if assembly_type(releases_config, assembly) == AssemblyTypes.STANDARD: + if defined_permits: + raise ValueError(f'STANDARD assemblies like {assembly} do not allow "permits"') + + for permit in defined_permits: + if permit.code == AssemblyIssueCode.IMPERMISSIBLE.name: + raise ValueError(f'IMPERMISSIBLE cannot be permitted in any assembly (assembly: {assembly})') + if permit.code not in ['*', *[aic.name for aic in AssemblyIssueCode]]: + raise ValueError(f'Undefined permit code in assembly {assembly}: {permit.code}') + return defined_permits diff --git a/doozerlib/assembly_inspector.py b/doozerlib/assembly_inspector.py index 80f68d2db..3830e9e94 100644 --- a/doozerlib/assembly_inspector.py +++ b/doozerlib/assembly_inspector.py @@ -7,7 +7,7 @@ from doozerlib import util from doozerlib.image import BrewBuildImageInspector from doozerlib.rpmcfg import RPMMetadata -from doozerlib.assembly import assembly_rhcos_config, AssemblyTypes +from doozerlib.assembly import assembly_rhcos_config, AssemblyTypes, assembly_permits, AssemblyIssue, AssemblyIssueCode class AssemblyInspector: @@ -20,6 +20,7 @@ def __init__(self, runtime: Runtime, brew_session: ClientSession): self.assembly_rhcos_config = assembly_rhcos_config(self.runtime.releases_config, self.runtime.assembly) self._rpm_build_cache: Dict[int, Dict[str, Optional[Dict]]] = {} # Dict[rhel_ver] -> Dict[distgit_key] -> Optional[BuildDict] + self._permits = assembly_permits(self.runtime.releases_config, self.runtime.assembly) # If an image component has a latest build, an ImageInspector associated with the image. 
self._release_image_inspectors: Dict[str, Optional[BrewBuildImageInspector]] = dict() @@ -30,7 +31,22 @@ def __init__(self, runtime: Runtime, brew_session: ClientSession): else: self._release_image_inspectors[image_meta.distgit_key] = None - def check_rhcos_consistency(self, rhcos_build: rhcos.RHCOSBuildInspector) -> List[str]: + def does_permit(self, issue: AssemblyIssue) -> bool: + """ + Checks all permits for this assembly definition to see if a given issue + is actually permitted when it is time to construct a payload. + :return: Returns True if the issue is permitted to exist in the assembly payload. + """ + for permit in self._permits: + if issue.code == AssemblyIssueCode.IMPERMISSIBLE: + # permitting '*' still doesn't permit impermissible + return False + if permit.code == '*' or issue.code.name == permit.code: + if permit.component == '*' or issue.component == permit.component: + return True + return False + + def check_rhcos_issues(self, rhcos_build: rhcos.RHCOSBuildInspector) -> List[AssemblyIssue]: """ Analyzes an RHCOS build to check whether the installed packages are consistent with: 1. package NVRs defined at the group dependency level @@ -41,25 +57,42 @@ def check_rhcos_consistency(self, rhcos_build: rhcos.RHCOSBuildInspector) -> Lis """ self.runtime.logger.info(f'Checking RHCOS build for consistency: {str(rhcos_build)}...') - issues: List[str] = [] - desired_packages: Dict[str, str] = dict() # Dict[package_name] -> nvr - assembly_overrides = [] - assembly_overrides.extend(self.runtime.get_group_config().dependencies or []) - assembly_overrides.extend(self.assembly_rhcos_config.dependencies or []) - for package_entry in assembly_overrides: - if 'el8' in package_entry: # TODO: how to make group aware of RHCOS RHEL base? - nvr = package_entry['el8'] + issues: List[AssemblyIssue] = [] + required_packages: Dict[str, str] = dict() # Dict[package_name] -> nvr # Dependency specified in 'rhcos' in assembly definition + desired_packages: Dict[str, str] = dict() # Dict[package_name] -> nvr # Dependency specified at group level + el_tag = f'el{rhcos_build.get_rhel_base_version()}' + for package_entry in (self.runtime.get_group_config().dependencies or []): + if el_tag in package_entry: + nvr = package_entry[el_tag] package_name = parse_nvr(nvr)['name'] desired_packages[package_name] = nvr + for package_entry in (self.assembly_rhcos_config.dependencies or []): + if el_tag in package_entry: + nvr = package_entry[el_tag] + package_name = parse_nvr(nvr)['name'] + required_packages[package_name] = nvr + desired_packages[package_name] = nvr # Override if something else was at the group level + installed_packages = rhcos_build.get_package_build_objects() - for package_name, build_dict in installed_packages.items(): - if package_name in assembly_overrides: - required_nvr = assembly_overrides[package_name] - installed_nvr = build_dict['nvr'] - if installed_nvr != required_nvr: - issues.append(f'Expected {required_nvr} in RHCOS build but found {installed_nvr}') + for package_name, desired_nvr in desired_packages.items(): + + if package_name in required_packages and package_name not in installed_packages: + # If the dependency is specified in the 'rhcos' section of the assembly, we must find it or raise an issue. + # This is impermissible because it can simply be fixed in the assembly definition. 
+ issues.append(AssemblyIssue(f'Expected assembly defined rhcos dependency {desired_nvr} to be installed in {rhcos_build.build_id} but that package was not installed', component='rhcos')) + if package_name in installed_packages: + installed_build_dict = installed_packages[package_name] + installed_nvr = installed_build_dict['nvr'] + if installed_nvr != desired_nvr: + # We could consider permitting this in AssemblyTypes.CUSTOM, but it means that the RHCOS build + # could not be effectively reproduced by the rebuild job. + issues.append(AssemblyIssue(f'Expected {desired_nvr} to be installed in RHCOS build {rhcos_build.build_id} but found {installed_nvr}', component='rhcos', code=AssemblyIssueCode.CONFLICTING_INHERITED_DEPENDENCY)) + + """ + If the rhcos build has RPMs from this group installed, make sure they match the NVRs associated with this assembly. + """ for dgk, assembly_rpm_build in self.get_group_rpm_build_dicts(el_ver=rhcos_build.get_rhel_base_version()).items(): if not assembly_rpm_build: continue @@ -68,11 +101,13 @@ def check_rhcos_consistency(self, rhcos_build: rhcos.RHCOSBuildInspector) -> Lis if package_name in installed_packages: installed_nvr = installed_packages[package_name]['nvr'] if assembly_nvr != installed_nvr: - issues.append(f'Expected {rhcos_build.build_id} image to contain assembly selected RPM build {assembly_nvr} but found {installed_nvr} installed') + # We could consider permitting this in AssemblyTypes.CUSTOM, but it means that the RHCOS build + # could not be effectively reproduced by the rebuild job. + issues.append(AssemblyIssue(f'Expected {rhcos_build.build_id}/{rhcos_build.brew_arch} image to contain assembly selected RPM build {assembly_nvr} but found {installed_nvr} installed', component='rhcos', code=AssemblyIssueCode.CONFLICTING_GROUP_RPM_INSTALLED)) return issues - def check_group_rpm_package_consistency(self, rpm_meta: RPMMetadata) -> List[str]: + def check_group_rpm_package_consistency(self, rpm_meta: RPMMetadata) -> List[AssemblyIssue]: """ Evaluate the current assembly builds of RPMs in the group and check whether they are consistent with the assembly definition. @@ -80,14 +115,15 @@ def check_group_rpm_package_consistency(self, rpm_meta: RPMMetadata) -> List[str :return: Returns a (potentially empty) list of reasons the rpm should be rebuilt. """ self.runtime.logger.info(f'Checking group RPM for consistency: {rpm_meta.distgit_key}...') - issues: List[str] = [] + issues: List[AssemblyIssue] = [] for rpm_meta in self.runtime.rpm_metas(): dgk = rpm_meta.distgit_key for el_ver in rpm_meta.determine_rhel_targets(): brew_build_dict = self.get_group_rpm_build_dicts(el_ver=el_ver)[dgk] if not brew_build_dict: - issues.append(f'Did not find rhel-{el_ver} build for {dgk}') + # Impermissible. The RPM should be built for each target. + issues.append(AssemblyIssue(f'Did not find rhel-{el_ver} build for {dgk}', component=dgk)) continue """ @@ -114,7 +150,8 @@ def check_group_rpm_package_consistency(self, rpm_meta: RPMMetadata) -> List[str # the desired commit. build_nvr = brew_build_dict['nvr'] if target_branch[:7] not in build_nvr: - issues.append(f'{dgk} build for rhel-{el_ver} did not find git commit {target_branch[:7]} in package RPM NVR {build_nvr}') + # Impermissible because the assembly definition can simply be changed. 
+                        issues.append(AssemblyIssue(f'{dgk} build for rhel-{el_ver} did not find git commit {target_branch[:7]} in package RPM NVR {build_nvr}', component=dgk))
                 except ValueError:
                     # The meta's target branch a normal branch name
                     # and not a git commit. When this is the case,
@@ -124,7 +161,7 @@ def check_group_rpm_package_consistency(self, rpm_meta: RPMMetadata) -> List[str
 
         return issues
 
-    def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector) -> List[str]:
+    def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector) -> List[AssemblyIssue]:
         """
         Evaluate the current assembly build and an image in the group and check whether they are consistent with
         :param build_inspector: The brew build to check
@@ -132,9 +169,10 @@ def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector
         """
         image_meta = build_inspector.get_image_meta()
         self.runtime.logger.info(f'Checking group image for consistency: {image_meta.distgit_key}...')
-        issues: List[str] = []
+        issues: List[AssemblyIssue] = []
 
         installed_packages = build_inspector.get_all_installed_package_build_dicts()
+        dgk = build_inspector.get_image_meta().distgit_key
 
         """
         If the assembly defined any RPM package dependencies at the group or image
@@ -144,14 +182,20 @@ def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector
         RPMs. Both assemblies and this method deal with the package level - not
         individual RPMs.
         """
-        package_overrides: Dict[str, str] = image_meta.get_assembly_rpm_package_dependencies(el_ver=image_meta.branch_el_target())
-        if package_overrides:
-            for package_name, required_nvr in package_overrides.items():
+        member_package_overrides, all_package_overrides = image_meta.get_assembly_rpm_package_dependencies(el_ver=image_meta.branch_el_target())
+        if member_package_overrides or all_package_overrides:
+            for package_name, required_nvr in all_package_overrides.items():
+                if package_name in member_package_overrides and package_name not in installed_packages:
+                    # A dependency was defined explicitly in an assembly member, but it is not installed.
+                    # i.e. the artist expected something to be installed, but it wasn't. Raise an issue.
+                    # This is impermissible because the assembly definition can easily be changed to remove the explicit dep.
+ issues.append(AssemblyIssue(f'Expected image to contain assembly member override dependencies NVR {required_nvr} but it was not installed', component=dgk)) + if package_name in installed_packages: installed_build_dict: Dict = installed_packages[package_name] installed_nvr = installed_build_dict['nvr'] if required_nvr != installed_nvr: - issues.append(f'Expected image to contain assembly override dependencies NVR {required_nvr} but found {installed_nvr} installed') + issues.append(AssemblyIssue(f'Expected image to contain assembly override dependencies NVR {required_nvr} but found {installed_nvr} installed', component=dgk, code=AssemblyIssueCode.CONFLICTING_INHERITED_DEPENDENCY)) """ If an image contains an RPM from the doozer group, make sure it is the current @@ -168,7 +212,7 @@ def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector if package_name in installed_packages: installed_nvr = installed_packages[package_name]['nvr'] if installed_nvr != assembly_nvr: - issues.append(f'Expected image to contain assembly RPM build {assembly_nvr} but found {installed_nvr} installed') + issues.append(AssemblyIssue(f'Expected image to contain assembly RPM build {assembly_nvr} but found {installed_nvr} installed', component=dgk, code=AssemblyIssueCode.CONFLICTING_GROUP_RPM_INSTALLED)) """ Assess whether the image build has the upstream @@ -181,7 +225,8 @@ def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector content_git_url, _ = self.runtime.get_public_upstream(util.convert_remote_git_to_https(content_git_url)) build_git_url = util.convert_remote_git_to_https(build_inspector.get_source_git_url()) if content_git_url != build_git_url: - issues.append(f'Expected image build git source from metadata {content_git_url} but found {build_git_url} as the source of the build') + # Impermissible as artist can just fix upstream git source in assembly definition + issues.append(AssemblyIssue(f'Expected image git source from metadata {content_git_url} but found {build_git_url} as the upstream source of the brew build', component=dgk)) try: target_branch = image_meta.config.content.source.git.branch.target @@ -192,7 +237,8 @@ def check_group_image_consistency(self, build_inspector: BrewBuildImageInspector # it perfectly matches what we find in the assembly build. build_commit = build_inspector.get_source_git_commit() if target_branch != build_commit: - issues.append(f'Expected image build git commit {target_branch} but {build_commit} was found in the build') + # Impermissible as artist can just fix the assembly definition. + issues.append(AssemblyIssue(f'Expected image build git commit {target_branch} but {build_commit} was found in the build', component=dgk)) except ValueError: # The meta's target branch a normal branch name # and not a git commit. 
When this is the case, diff --git a/doozerlib/brew.py b/doozerlib/brew.py index 1eaea2580..8b24c2a0f 100644 --- a/doozerlib/brew.py +++ b/doozerlib/brew.py @@ -273,7 +273,6 @@ def list_image_rpms(image_ids: List[int], session: koji.ClientSession) -> List[O :param session: instance of Brew session :return: a list of Koji/Brew RPM lists """ - tasks = [] with session.multicall(strict=True) as m: tasks = [m.listRPMs(imageID=image_id) for image_id in image_ids] return [task.result for task in tasks] @@ -285,7 +284,6 @@ def list_build_rpms(build_ids: List[int], session: koji.ClientSession) -> List[O :param session: instance of Brew session :return: a list of Koji/Brew RPM lists """ - tasks = [] with session.multicall(strict=True) as m: tasks = [m.listBuildRPMs(build) for build in build_ids] return [task.result for task in tasks] @@ -537,6 +535,7 @@ def __init__(self, koji_session_args, brew_event=None, force_instance_caching=Fa self.___brew_event = None if not brew_event else int(brew_event) super(KojiWrapper, self).__init__(*koji_session_args) self.force_instance_caching = force_instance_caching + self._gss_logged_in: bool = False # Tracks whether this instance has authenticated self.___before_timestamp = None if brew_event: self.___before_timestamp = self.getEvent(self.___brew_event)['ts'] @@ -553,7 +552,6 @@ def get_cache_size(cls): @classmethod def get_next_call_id(cls): - global _koji_call_counter, _koji_wrapper_lock with cls._koji_wrapper_lock: cid = cls._koji_call_counter cls._koji_call_counter = cls._koji_call_counter + 1 @@ -759,8 +757,9 @@ def package_result(result, cache_hit: bool): def gssapi_login(self, principal=None, keytab=None, ccache=None, proxyuser=None): # Prevent redundant logins for shared sessions. - if super().logged_in: + if self._gss_logged_in: if logger: - logger.warning(f'Attempted to login to already logged in KojiWrapper instance') + logger.warning('Attempted to login to already logged in KojiWrapper instance') return True - return super().gssapi_login(principal=principal, keytab=keytab, ccache=ccache, proxyuser=proxyuser) + self._gss_logged_in = super().gssapi_login(principal=principal, keytab=keytab, ccache=ccache, proxyuser=proxyuser) + return self._gss_logged_in diff --git a/doozerlib/build_status_detector.py b/doozerlib/build_status_detector.py index 32a2fd650..8351dab58 100644 --- a/doozerlib/build_status_detector.py +++ b/doozerlib/build_status_detector.py @@ -2,8 +2,6 @@ from multiprocessing import Lock from typing import Dict, List, Optional, Set, Iterable -from koji import ClientSession - from doozerlib import brew, util @@ -12,12 +10,13 @@ class BuildStatusDetector: A BuildStatusDetector can find builds with embargoed fixes """ - def __init__(self, session: ClientSession, logger: Optional[Logger] = None): + def __init__(self, runtime, logger: Optional[Logger] = None): """ creates a new BuildStatusDetector - :param session: a koji client session + :param runtime: The doozer runtime :param logger: a logger """ - self.koji_session = session + self.runtime = runtime + self.koji_session = runtime.build_retrying_koji_client() self.logger = logger self.shipping_statuses: Dict[int, bool] = {} # a dict for caching build shipping statues. key is build id, value is True if shipped. self.archive_lists: Dict[int, List[Dict]] = {} # a dict for caching archive lists. key is build id, value is a list of archives associated with that build. 
@@ -132,14 +131,31 @@ def find_unshipped_candidate_rpms(self, candidate_tag: str, event: Optional[int]
         :param candidate_tag: string tag name to search for candidate builds
         :param event: A brew event with which to limit the brew query on latest builds.
-        :return: a list of brew RPMs (the contents of the builds) from unshipped latest builds
+        :return: a list of brew RPM records (not the package build dicts) from unshipped latest builds
         """
         key = (candidate_tag, event)
         with self.cache_lock:
             if key not in self.unshipped_candidate_rpms_cache:
-                latest = self.koji_session.getLatestBuilds(candidate_tag, event=event, type="rpm")
-                shipped_ids = self.find_shipped_builds([b["id"] for b in latest])
-                unshipped_build_ids = [build["id"] for build in latest if build["id"] not in shipped_ids]
+                latest_in_tag: List[Dict] = self.koji_session.getLatestBuilds(candidate_tag, event=event, type="rpm")
+                latest_by_package: Dict[str, Dict] = {b['package_name']: b for b in latest_in_tag}
+
+                # Due to assemblies existing in their own conceptual build streams,
+                # we need to check any group members for their latest build in the
+                # current assembly. This may be different than what is in the tag.
+                for rpm_meta in self.runtime.rpm_metas():
+                    assembly_build_dict = rpm_meta.get_latest_build(default=None, el_target=candidate_tag)
+                    if not assembly_build_dict:
+                        # Likely this RPM has not been built for this RHEL version.
+                        continue
+                    package_name = assembly_build_dict['package_name']
+                    if package_name in latest_by_package:
+                        # Override the package with whatever is latest for this assembly
+                        latest_by_package[package_name] = assembly_build_dict
+
+                latest_for_assembly = latest_by_package.values()  # Actual latest builds with respect to assembly
+
+                shipped_ids = self.find_shipped_builds([b["id"] for b in latest_for_assembly])
+                unshipped_build_ids = [build["id"] for build in latest_for_assembly if build["id"] not in shipped_ids]
                 rpms_lists = brew.list_build_rpms(unshipped_build_ids, self.koji_session)
                 self.unshipped_candidate_rpms_cache[key] = [r for rpms in rpms_lists for r in rpms]
diff --git a/doozerlib/cli/detect_embargo.py b/doozerlib/cli/detect_embargo.py
index 4e8e4b6bf..e51d909dc 100644
--- a/doozerlib/cli/detect_embargo.py
+++ b/doozerlib/cli/detect_embargo.py
@@ -136,7 +136,7 @@ def detect_embargoes_in_nvrs(runtime: Runtime, nvrs: List[str]):
         if not b:
             raise DoozerFatalError(f"Unable to get {nvrs[i]} from Brew.")
     runtime.logger.info(f"Detecting embargoes for {len(nvrs)} builds...")
-    detector = bs_detector.BuildStatusDetector(brew_session, runtime.logger)
+    detector = bs_detector.BuildStatusDetector(runtime, runtime.logger)
     embargoed_build_ids = detector.find_embargoed_builds(builds, runtime.get_candidate_brew_tags())
     embargoed_builds = [b for b in builds if b["id"] in embargoed_build_ids]
     return embargoed_builds
@@ -165,7 +165,7 @@ def detect_embargoes_in_tags(runtime: Runtime, kind: str, included_tags: List[st
     # Builds may have duplicate entries if we query from multiple tags. Don't worry, BuildStatusDetector is smart.
runtime.logger.info(f"Detecting embargoes for {len(included_builds)} builds...") - detector = bs_detector.BuildStatusDetector(brew_session, runtime.logger) + detector = bs_detector.BuildStatusDetector(runtime, runtime.logger) embargoed_build_ids = detector.find_embargoed_builds(included_builds, runtime.get_candidate_brew_tags()) embargoed_builds = [b for b in included_builds if b["id"] in embargoed_build_ids] return embargoed_builds diff --git a/doozerlib/cli/release_gen_payload.py b/doozerlib/cli/release_gen_payload.py index facc0a5b5..d3987ba79 100644 --- a/doozerlib/cli/release_gen_payload.py +++ b/doozerlib/cli/release_gen_payload.py @@ -1,4 +1,5 @@ import io +import sys import json from typing import List, Optional, Tuple, Dict, NamedTuple, Iterable, Set @@ -12,7 +13,7 @@ from doozerlib.assembly_inspector import AssemblyInspector from doozerlib.runtime import Runtime from doozerlib.util import red_print, go_suffix_for_arch, brew_arch_for_go_arch, isolate_nightly_name_components -from doozerlib.assembly import AssemblyTypes, assembly_basis +from doozerlib.assembly import AssemblyTypes, assembly_basis, AssemblyIssue, AssemblyIssueCode from doozerlib import exectools from doozerlib.model import Model @@ -51,10 +52,8 @@ def payload_imagestream_name_and_namespace(base_imagestream_name: str, base_name help="Quay REPOSITORY in ORGANIZATION to mirror into.\ndefault=ocp-v4.0-art-dev") @click.option("--exclude-arch", metavar='ARCH', required=False, multiple=True, help="Architecture (brew nomenclature) to exclude from payload generation") -@click.option('--permit-mismatched-siblings', default=False, is_flag=True, help='Ignore sibling images building from different commits') -@click.option('--permit-invalid-reference-releases', default=False, is_flag=True, help='Ignore if reference nightlies do not reflect current assembly state. Do not use outside of testing.') @pass_runtime -def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: Optional[str], organization: Optional[str], repository: Optional[str], exclude_arch: Tuple[str, ...], permit_mismatched_siblings: bool, permit_invalid_reference_releases: bool): +def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: Optional[str], organization: Optional[str], repository: Optional[str], exclude_arch: Tuple[str, ...]): """Generates two sets of input files for `oc` commands to mirror content and update image streams. Files are generated for each arch defined in ocp-build-data for a version, as well as a final file for @@ -112,6 +111,7 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: and in more detail in state.yaml. The release-controller, per ART-2195, will read and propagate/expose this annotation in its display of the release image. 
""" + runtime.initialize(mode='both', clone_distgits=False, clone_source=False, prevent_cloning=True) logger = runtime.logger brew_session = runtime.build_retrying_koji_client() @@ -124,28 +124,22 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: logger.info(f'Collecting latest information associated with the assembly: {runtime.assembly}') assembly_inspector = AssemblyInspector(runtime, brew_session) - logger.info(f'Checking for mismatched siblings...') + logger.info('Checking for mismatched siblings...') mismatched_siblings = PayloadGenerator.find_mismatched_siblings(assembly_inspector.get_group_release_images().values()) # A list of strings that denote inconsistencies across all payloads generated - assembly_wide_inconsistencies: List[str] = list() + assembly_issues: List[AssemblyIssue] = list() - if runtime.assembly != 'test' and mismatched_siblings and runtime.assembly_type in (AssemblyTypes.STREAM, AssemblyTypes.STANDARD): - msg = f'At least one set of image siblings were built from the same repo but different commits ({mismatched_siblings}). This is not permitted for {runtime.assembly_type} assemblies' - if permit_mismatched_siblings: - red_print(msg) - assembly_wide_inconsistencies.append(f"Mismatched siblings: {mismatched_siblings}") - else: - raise ValueError(msg) + for mismatched_bbii, sibling_bbi in mismatched_siblings: + mismatch_issue = AssemblyIssue(f'{mismatched_bbii.get_nvr()} was built from a different upstream source commit ({mismatched_bbii.get_source_git_commit()[:7]}) than one of its siblings {sibling_bbi.get_nvr()} from {sibling_bbi.get_source_git_commit()[:7]}', + component=mismatched_bbii.get_image_meta().distgit_key, + code=AssemblyIssueCode.MISMATCHED_SIBLINGS) + assembly_issues.append(mismatch_issue) report = dict() report['non_release_images'] = [image_meta.distgit_key for image_meta in runtime.get_non_release_image_metas()] report['release_images'] = [image_meta.distgit_key for image_meta in runtime.get_for_release_image_metas()] - report['mismatched_siblings'] = [build_image_inspector.get_nvr() for build_image_inspector in mismatched_siblings] report['missing_image_builds'] = [dgk for (dgk, ii) in assembly_inspector.get_group_release_images().items() if ii is None] # A list of metas where the assembly did not find a build - payload_entry_inconsistencies: Dict[str, List[str]] = dict() # Payload tag -> list of issue strings - report['payload_entry_inconsistencies'] = payload_entry_inconsistencies - report['assembly_issues'] = assembly_wide_inconsistencies # Any overall gripes with the payload if runtime.assembly_type is AssemblyTypes.STREAM: # Only nightlies have the concept of private and public payloads @@ -173,26 +167,24 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: """ Make sure that RPMs belonging to this assembly/group are consistent with the assembly definition. """ - rpm_meta_inconsistencies: Dict[str, List[str]] = dict() for rpm_meta in runtime.rpm_metas(): issues = assembly_inspector.check_group_rpm_package_consistency(rpm_meta) - if issues: - rpm_meta_inconsistencies.get(rpm_meta.distgit_key, []).extend(issues) - - image_meta_inconsistencies: Dict[str, List[str]] = dict() + assembly_issues.extend(issues) """ If this is a stream assembly, images which are not using the latest builds should not reach - the release controller. + the release controller. Other assemblies are meant to be constructed from non-latest. 
""" if runtime.assembly == 'stream': for dgk, build_inspector in assembly_inspector.get_group_release_images().items(): if build_inspector: non_latest_rpm_nvrs = build_inspector.find_non_latest_rpms() - if non_latest_rpm_nvrs: - # This indicates an issue with scan-sources - dgk = build_inspector.get_image_meta().distgit_key - image_meta_inconsistencies.get(dgk, []).append(f'Found outdated RPMs installed in {build_inspector.get_nvr()}: {non_latest_rpm_nvrs}') + dgk = build_inspector.get_image_meta().distgit_key + for installed_nvr, newest_nvr in non_latest_rpm_nvrs: + # This indicates an issue with scan-sources or that an image is no longer successfully building. + # Impermissible as this speaks to a potentially deeper issue of images not being rebuilt + outdated_issue = AssemblyIssue(f'Found outdated RPM ({installed_nvr}) installed in {build_inspector.get_nvr()} when {newest_nvr} was available', component=dgk, code=AssemblyIssueCode.OUTDATED_RPMS_IN_STREAM_BUILD) + assembly_issues.append(outdated_issue) # Add to overall issues """ Make sure image build selected by this assembly/group are consistent with the assembly definition. @@ -200,17 +192,7 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: for dgk, bbii in assembly_inspector.get_group_release_images().items(): if bbii: issues = assembly_inspector.check_group_image_consistency(bbii) - if issues: - image_meta_inconsistencies.get(dgk, []).extend(issues) - - if image_meta_inconsistencies or rpm_meta_inconsistencies: - rebuilds_required = { - 'images': image_meta_inconsistencies, - 'rpms': rpm_meta_inconsistencies, - } - red_print(f'Unable to proceed. Builds selected by this assembly/group and not compliant with the assembly definition: {assembly_inspector.get_assembly_name()}') - print(yaml.dump(rebuilds_required, default_flow_style=False, indent=2)) - exit(1) + assembly_issues.extend(issues) for arch in runtime.arches: if arch in exclude_arch: @@ -222,21 +204,20 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: for tag, payload_entry in entries.items(): if payload_entry.image_meta: - # We already cached inconsistencies for each build; look them up if there are any. - payload_entry.inconsistencies.extend(image_meta_inconsistencies.get(payload_entry.image_meta.distgit_key, [])) + # We already stored inconsistencies for each image_meta; look them up if there are any. + payload_entry.issues.extend(filter(lambda ai: ai.component == payload_entry.image_meta.distgit_key, assembly_issues)) elif payload_entry.rhcos_build: - payload_entry.inconsistencies.extend(assembly_inspector.check_rhcos_consistency(payload_entry.rhcos_build)) + assembly_issues.extend(assembly_inspector.check_rhcos_issues(payload_entry.rhcos_build)) + payload_entry.issues.extend(filter(lambda ai: ai.component == 'rhcos', assembly_issues)) if runtime.assembly == 'stream': # For stream alone, we want to enforce that the very latest RPMs are installed. non_latest_rpm_nvrs = payload_entry.rhcos_build.find_non_latest_rpms() - if non_latest_rpm_nvrs: - # Raise an error, because this indicates an issue with config:scan-sources. 
- raise IOError(f'Found outdated RPMs installed in {payload_entry.rhcos_build}: {non_latest_rpm_nvrs}; this will likely self correct once the next RHCOS build takes place.') + for installed_nvr, newest_nvr in non_latest_rpm_nvrs: + assembly_issues.append(AssemblyIssue(f'Found outdated RPM ({installed_nvr}) installed in {payload_entry.rhcos_build} when {newest_nvr} is available', + component='rhcos', + code=AssemblyIssueCode.OUTDATED_RPMS_IN_STREAM_BUILD)) else: raise IOError(f'Unsupported PayloadEntry: {payload_entry}') - # Report any inconsistencies to in the final yaml output - if payload_entry.inconsistencies: - payload_entry_inconsistencies[tag] = payload_entry.inconsistencies # Save the default SRC=DEST input to a file for syncing by 'oc image mirror'. Why is # there no '-priv'? The true images for the assembly are what we are syncing - @@ -266,7 +247,7 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: base_istream_namespace, arch, private_mode) - istream_spec = PayloadGenerator.build_payload_imagestream(imagestream_name, imagestream_namespace, istags, assembly_wide_inconsistencies) + istream_spec = PayloadGenerator.build_payload_imagestream(imagestream_name, imagestream_namespace, istags, assembly_issues) yaml.safe_dump(istream_spec, out_file, indent=2, default_flow_style=False) # Now make sure that all of the RHCOS builds contain consistent RPMs @@ -274,22 +255,34 @@ def release_gen_payload(runtime: Runtime, is_name: Optional[str], is_namespace: rhcos_builds = targeted_rhcos_builds[private_mode] rhcos_inconsistencies: Dict[str, List[str]] = PayloadGenerator.find_rhcos_build_rpm_inconsistencies(rhcos_builds) if rhcos_inconsistencies: - red_print(f'Found RHCOS inconsistencies in builds {targeted_rhcos_builds}: {rhcos_inconsistencies}') - raise IOError(f'Found RHCOS inconsistencies in builds') + assembly_issues.append(AssemblyIssue(f'Found RHCOS inconsistencies in builds {targeted_rhcos_builds}: {rhcos_inconsistencies}', component='rhcos', code=AssemblyIssueCode.INCONSISTENT_RHCOS_RPMS)) # If the assembly claims to have reference nightlies, assert that our payload # matches them exactly. nightly_match_issues = PayloadGenerator.check_nightlies_consistency(assembly_inspector) if nightly_match_issues: - msg = 'Nightlies in reference-releases did not match constructed payload:\n' + yaml.dump(nightly_match_issues) - if permit_invalid_reference_releases: - red_print(msg) - assembly_wide_inconsistencies.extend(nightly_match_issues) - else: - # Artist must remove nightly references or fix the assembly definition. 
- raise IOError(msg) + assembly_issues.extend(nightly_match_issues) + + assembly_issues_report: Dict[str, List[Dict]] = dict() + report['assembly_issues'] = assembly_issues_report + + overall_permitted = True + for ai in assembly_issues: + permitted = assembly_inspector.does_permit(ai) + overall_permitted &= permitted # If anything is not permitted, exit with an error + assembly_issues_report.setdefault(ai.component, []).append({ + 'code': ai.code.name, + 'msg': ai.msg, + 'permitted': permitted + }) + + report['viable'] = overall_permitted print(yaml.dump(report, default_flow_style=False, indent=2)) + if not overall_permitted: + red_print('DO NOT PROCEED WITH THIS ASSEMBLY PAYLOAD -- not all detected issues are permitted.', file=sys.stderr) + exit(1) + exit(0) class PayloadGenerator: @@ -299,8 +292,8 @@ class PayloadEntry(NamedTuple): # The destination pullspec dest_pullspec: str - # Append any inconsistencies found for the entry - inconsistencies: List[str] + # Append any issues for the assembly + issues: List[AssemblyIssue] """ If the entry is for an image in this doozer group, these values will be set. @@ -318,11 +311,11 @@ class PayloadEntry(NamedTuple): rhcos_build: Optional[RHCOSBuildInspector] = None @staticmethod - def find_mismatched_siblings(build_image_inspectors: Iterable[Optional[BrewBuildImageInspector]]) -> List[BrewBuildImageInspector]: + def find_mismatched_siblings(build_image_inspectors: Iterable[Optional[BrewBuildImageInspector]]) -> List[Tuple[BrewBuildImageInspector, BrewBuildImageInspector]]: """ Sibling images are those built from the same repository. We need to throw an error if there are sibling built from different commits. - :return: Returns a list of ImageMetadata which had conflicting upstream commits + :return: Returns a list of (BrewBuildImageInspector,BrewBuildImageInspector) where the first item is a mismatched sibling of the second """ class RepoBuildRecord(NamedTuple): build_image_inspector: BrewBuildImageInspector @@ -332,7 +325,7 @@ class RepoBuildRecord(NamedTuple): # encountered claiming it is sourced from the SOURCE_GIT_URL repo_builds: Dict[str, RepoBuildRecord] = dict() - mismatched_siblings: List[BrewBuildImageInspector] = [] + mismatched_siblings: List[Tuple[BrewBuildImageInspector, BrewBuildImageInspector]] = [] for build_image_inspector in build_image_inspectors: if not build_image_inspector: @@ -350,9 +343,8 @@ class RepoBuildRecord(NamedTuple): # Another component has build from this repo before. Make # sure it built from the same commit. if potential_conflict.source_git_commit != source_git_commit: - mismatched_siblings.append(potential_conflict.build_image_inspector) - mismatched_siblings.append(build_image_inspector) - red_print(f"The following NVRs are siblings but built from different commits: {potential_conflict.build_image_inspector.get_nvr()} and {build_image_inspector.get_nvr()}") + mismatched_siblings.append((build_image_inspector, potential_conflict.build_image_inspector)) + red_print(f"The following NVRs are siblings but built from different commits: {potential_conflict.build_image_inspector.get_nvr()} and {build_image_inspector.get_nvr()}", file=sys.stderr) else: # No conflict, so this is our first encounter for this repo; add it to our tracking dict. 
repo_builds[source_url] = RepoBuildRecord(build_image_inspector=build_image_inspector, source_git_commit=source_git_commit) @@ -414,7 +406,7 @@ def find_payload_entries(assembly_inspector: AssemblyInspector, arch: str, dest_ build_inspector=archive_inspector.get_brew_build_inspector(), archive_inspector=archive_inspector, dest_pullspec=PayloadGenerator.get_mirroring_destination(archive_inspector, dest_repo), - inconsistencies=list(), + issues=list(), ) # members now contains a complete map of payload tag keys, but some values may be None. This is an @@ -441,7 +433,7 @@ def find_payload_entries(assembly_inspector: AssemblyInspector, arch: str, dest_ final_members['machine-os-content'] = PayloadGenerator.PayloadEntry( dest_pullspec=rhcos_build.get_image_pullspec(), rhcos_build=rhcos_build, - inconsistencies=list(), + issues=list(), ) # Final members should have all tags populated. @@ -455,7 +447,7 @@ def build_payload_istag(payload_tag_name: str, payload_entry: PayloadEntry) -> D :return: Returns a imagestreamtag dict for a release payload imagestream. """ return { - 'annotations': PayloadGenerator._build_inconsistency_annotation(payload_entry.inconsistencies), + 'annotations': PayloadGenerator._build_inconsistency_annotation(payload_entry.issues), 'name': payload_tag_name, 'from': { 'kind': 'DockerImage', @@ -464,7 +456,7 @@ def build_payload_istag(payload_tag_name: str, payload_entry: PayloadEntry) -> D } @staticmethod - def build_payload_imagestream(imagestream_name: str, imagestream_namespace: str, payload_istags: Iterable[Dict], assembly_wide_inconsistencies: Iterable[str]) -> Dict: + def build_payload_imagestream(imagestream_name: str, imagestream_namespace: str, payload_istags: Iterable[Dict], assembly_wide_inconsistencies: Iterable[AssemblyIssue]) -> Dict: """ Builds a definition for a release payload imagestream from a set of payload istags. :param imagestream_name: The name of the imagstream to generate. @@ -490,7 +482,7 @@ def build_payload_imagestream(imagestream_name: str, imagestream_namespace: str, return istream_obj @staticmethod - def _build_inconsistency_annotation(inconsistencies: Iterable[str]): + def _build_inconsistency_annotation(inconsistencies: Iterable[AssemblyIssue]): """ :param inconsistencies: A list of strings to report as inconsistencies within an annotation. :return: Returns a dict containing an inconsistency annotation out of the specified str. @@ -500,11 +492,11 @@ def _build_inconsistency_annotation(inconsistencies: Iterable[str]): if not inconsistencies: return {} - inconsistencies = sorted(inconsistencies) - if len(inconsistencies) > 5: + msgs = sorted([i.msg for i in inconsistencies]) + if len(msgs) > 5: # an exhaustive list of the problems may be too large; that goes in the state file. - inconsistencies[5:] = ["(...and more)"] - return {"release.openshift.io/inconsistency": json.dumps(inconsistencies)} + msgs[5:] = ["(...and more)"] + return {"release.openshift.io/inconsistency": json.dumps(msgs)} @staticmethod def get_group_payload_tag_mapping(assembly_inspector: AssemblyInspector, arch: str) -> Dict[str, Optional[ArchiveImageInspector]]: @@ -523,7 +515,7 @@ def get_group_payload_tag_mapping(assembly_inspector: AssemblyInspector, arch: s # There was no build for this image found associated with the assembly. # In this case, don't put the tag_name into the imagestream. This is not good, # so be verbose. 
- red_print(f'Unable to find build for {dgk} for {assembly_inspector.get_assembly_name()}') + red_print(f'Unable to find build for {dgk} for {assembly_inspector.get_assembly_name()}', file=sys.stderr) continue image_meta: ImageMetadata = assembly_inspector.runtime.image_map[dgk] @@ -550,21 +542,21 @@ def get_group_payload_tag_mapping(assembly_inspector: AssemblyInspector, arch: s archive_inspector = build_inspector.get_image_archive_inspector(brew_arch) if not archive_inspector: - # This is no build for this CPU architecture for this build. Don't worry yet, - # it may be carried by an -alt image or not at all for non-x86 arches. - members[tag_name] = None - continue + # There is no build for this CPU architecture for this image_meta/build. This finding + # conflicts with the `arch not in image_meta.get_arches()` check above. + # Best to fail. + raise IOError(f'{dgk} claims to be built for {image_meta.get_arches()} but did not find {brew_arch} build for {build_inspector.get_brew_build_webpage_url()}') members[tag_name] = archive_inspector return members @staticmethod - def _check_nightly_consistency(assembly_inspector: AssemblyInspector, nightly: str, arch: str) -> List[str]: + def _check_nightly_consistency(assembly_inspector: AssemblyInspector, nightly: str, arch: str) -> List[AssemblyIssue]: runtime = assembly_inspector.runtime - def terminal_issue(msg: str): - return [msg] + def terminal_issue(msg: str) -> List[AssemblyIssue]: + return [AssemblyIssue(msg, component='reference-releases')] issues: List[str] runtime.logger.info(f'Processing nightly: {nightly}') @@ -593,7 +585,7 @@ def terminal_issue(msg: str): if not release_info.references.spec.tags: return terminal_issue(f'Could not find tags in nightly {nightly}') - issues: List[str] = list() + issues: List[AssemblyIssue] = list() payload_entries: Dict[str, PayloadGenerator.PayloadEntry] = PayloadGenerator.find_payload_entries(assembly_inspector, arch, '') for component_tag in release_info.references.spec.tags: # For each tag in the imagestream payload_tag_name: str = component_tag.name # e.g. 
"aws-ebs-csi-driver" @@ -610,17 +602,21 @@ def terminal_issue(msg: str): if entry.archive_inspector: if entry.archive_inspector.get_archive_digest() != pullspec_sha: - issues.append(f'{nightly} contains {payload_tag_name} sha {pullspec_sha} but assembly computed archive: {entry.archive_inspector.get_archive_id()} and {entry.archive_inspector.get_archive_pullspec()}') + # Impermissible because the artist should remove the reference nightlies from the assembly definition + issues.append(AssemblyIssue(f'{nightly} contains {payload_tag_name} sha {pullspec_sha} but assembly computed archive: {entry.archive_inspector.get_archive_id()} and {entry.archive_inspector.get_archive_pullspec()}', + component='reference-releases')) elif entry.rhcos_build: if entry.rhcos_build.get_machine_os_content_digest() != pullspec_sha: - issues.append(f'{nightly} contains {payload_tag_name} sha {pullspec_sha} but assembly computed rhcos: {entry.rhcos_build} and {entry.rhcos_build.get_machine_os_content_digest()}') + # Impermissible because the artist should remove the reference nightlies from the assembly definition + issues.append(AssemblyIssue(f'{nightly} contains {payload_tag_name} sha {pullspec_sha} but assembly computed rhcos: {entry.rhcos_build} and {entry.rhcos_build.get_machine_os_content_digest()}', + component='reference-releases')) else: raise IOError(f'Unsupported payload entry {entry}') return issues @staticmethod - def check_nightlies_consistency(assembly_inspector: AssemblyInspector) -> List[str]: + def check_nightlies_consistency(assembly_inspector: AssemblyInspector) -> List[AssemblyIssue]: """ If this assembly has reference-releases, check whether the current images selected by the assembly are an exact match for the nightly contents. @@ -629,7 +625,7 @@ def check_nightlies_consistency(assembly_inspector: AssemblyInspector) -> List[s if not basis or not basis.reference_releases: return [] - issues: List[str] = [] + issues: List[AssemblyIssue] = [] for arch, nightly in basis.reference_releases.primitive().items(): issues.extend(PayloadGenerator._check_nightly_consistency(assembly_inspector, nightly, arch)) diff --git a/doozerlib/image.py b/doozerlib/image.py index bc82ea8a8..2c0188fea 100644 --- a/doozerlib/image.py +++ b/doozerlib/image.py @@ -30,7 +30,7 @@ def __init__(self, runtime, data_obj: Dict, commitish: Optional[str] = None, clo if clone_source: runtime.resolve_source(self) - def get_assembly_rpm_package_dependencies(self, el_ver: int) -> Dict[str, str]: + def get_assembly_rpm_package_dependencies(self, el_ver: int) -> Tuple[Dict[str, str], Dict[str, str]]: """ An assembly can define RPMs which should be installed into a given member image. Those dependencies can be specified at either the individual member @@ -38,21 +38,31 @@ def get_assembly_rpm_package_dependencies(self, el_ver: int) -> Dict[str, str]: Dict[package_name] -> nvr for any package that should be installed due to these overrides. :param el_ver: Which version of RHEL to check - :return: Returns Dict[package_name] -> nvr for any assembly induced overrides. + :return: Returns a tuple with two Dict[package_name] -> nvr for any assembly induced overrides. + The first entry in the Tuple are dependencies directly specified in the member. + The second entry are dependencies specified in the group and member. 
""" - aggregate: Dict[str, str] = dict() # Map package_name -> nvr + direct_member_deps: Dict[str, str] = dict() # Map package_name -> nvr + aggregate_deps: Dict[str, str] = dict() # Map package_name -> nvr eltag = f'el{el_ver}' group_deps = self.runtime.get_group_config().dependencies.rpms or [] member_deps = self.config.dependencies.rpms or [] - full = [] - full.extend(group_deps) - full.extend(member_deps) # Will will process things such that late entries override early - for rpm_entry in full: + + for rpm_entry in group_deps: + if eltag in rpm_entry: # This entry has something for the requested RHEL version + nvr = rpm_entry[eltag] + package_name = parse_nvr(nvr)['name'] + aggregate_deps[package_name] = nvr + + # Perform the same process, but only for dependencies directly listed for the member + for rpm_entry in member_deps: if eltag in rpm_entry: # This entry has something for the requested RHEL version nvr = rpm_entry[eltag] package_name = parse_nvr(nvr)['name'] - aggregate[package_name] = nvr - return aggregate + direct_member_deps[package_name] = nvr + aggregate_deps[package_name] = nvr # Override anything at the group level + + return direct_member_deps, aggregate_deps def is_ancestor(self, image): """ @@ -115,11 +125,17 @@ def base_only(self): @property def is_payload(self): - return self.config.get('for_payload', False) + val = self.config.get('for_payload', False) + if val and self.base_only: + raise ValueError(f'{self.distgit_key} claims to be for_payload and base_only') + return val @property def for_release(self): - return self.config.get('for_release', True) + val = self.config.get('for_release', True) + if val and self.base_only: + raise ValueError(f'{self.distgit_key} claims to be for_release and base_only') + return val def get_payload_tag_info(self) -> Tuple[str, bool]: """ @@ -577,7 +593,7 @@ class ArchiveImageInspector: Represents and returns information about an archive image associated with a brew build. """ - def __init__(self, runtime, archive: Dict, brew_build_inspector: 'BrewBuildImageInspector' =None): + def __init__(self, runtime, archive: Dict, brew_build_inspector: 'BrewBuildImageInspector' = None): """ :param runtime: The brew build inspector associated with the build that created this archive. :param archive: The raw archive dict from brew. @@ -603,7 +619,7 @@ def get_archive_id(self) -> int: """ return self._archive['id'] - def get_image_envs(self) -> Dict[str, str]: + def get_image_envs(self) -> Dict[str, Optional[str]]: """ :return: Returns a dictionary of environment variables set for this image. """ @@ -747,6 +763,12 @@ def get_brew_build_id(self) -> int: """ return self._brew_build_id + def get_brew_build_webpage_url(self): + """ + :return: Returns a link for humans to go look at details for this brew build. + """ + return f'https://brewweb.engineering.redhat.com/brew/buildinfo?buildID={self._brew_build_id}' + def get_brew_build_dict(self) -> Dict: """ :return: Returns the koji getBuild dictionary for this iamge. 
@@ -926,9 +948,7 @@ def get_image_archive_inspector(self, arch: str) -> Optional[ArchiveImageInspect """ arch = brew_arch_for_go_arch(arch) # Make sure this is a brew arch found = filter(lambda ai: ai.image_arch() == arch, self.get_image_archive_inspectors()) - if not found: - return None - return list(found)[0] + return next(found, None) def is_under_embargo(self) -> bool: """ @@ -946,7 +966,7 @@ def is_under_embargo(self) -> bool: return self._cache[cn] - def find_non_latest_rpms(self) -> List[str]: + def find_non_latest_rpms(self) -> List[Tuple[str, str]]: """ If the packages installed in this image overlap packages in the candidate tag, return NVRs of the latest candidate builds that are not also installed in this image. @@ -961,7 +981,7 @@ def find_non_latest_rpms(self) -> List[str]: if same the package installed into this archive is not the same NVR. """ - candidate_brew_tag = self.runtime.get_default_candidate_brew_tag() + candidate_brew_tag = self.get_image_meta().candidate_brew_tag() # N.B. the "rpms" installed in an image archive are individual RPMs, not brew rpm package builds. # we compare against the individual RPMs from latest candidate rpm package builds. @@ -972,12 +992,12 @@ def find_non_latest_rpms(self) -> List[str]: bs_detector.find_unshipped_candidate_rpms(candidate_brew_tag, self.runtime.brew_event) } - old_nvrs = [] + old_nvrs: List[Tuple[str, str]] = [] archive_rpms = {rpm['name']: rpm for rpm in self.get_all_installed_rpm_dicts()} # we expect only a few unshipped candidates most of the the time, so we'll just search for those. for name, rpm in candidate_rpms.items(): archive_rpm = archive_rpms.get(name) if archive_rpm and rpm['nvr'] != archive_rpm['nvr']: - old_nvrs.append(archive_rpm['nvr']) + old_nvrs.append((archive_rpm['nvr'], rpm['nvr'])) return old_nvrs diff --git a/doozerlib/rhcos.py b/doozerlib/rhcos.py index 6bf5894c5..64f37dc62 100644 --- a/doozerlib/rhcos.py +++ b/doozerlib/rhcos.py @@ -3,11 +3,14 @@ import json from typing import Dict, List, Tuple, Optional +from kobo.rpmlib import parse_nvr + from tenacity import retry, stop_after_attempt, wait_fixed from urllib import request from doozerlib.util import brew_suffix_for_arch, isolate_el_version_in_release from doozerlib import exectools from doozerlib.model import Model +from doozerlib import brew RHCOS_BASE_URL = "https://releases-rhcos-art.cloud.privileged.psi.redhat.com/storage/releases" @@ -202,7 +205,7 @@ def get_package_build_objects(self) -> Dict[str, Dict]: with self.runtime.pooled_koji_client_session() as koji_api: for nvra in self.get_rpm_nvras(): rpm_def = koji_api.getRPM(nvra, strict=True) - package_build = koji_api.getBuild(rpm_def['build_id'], strict=True) + package_build = koji_api.getBuild(rpm_def['build_id'], brew.KojiWrapperOpts(caching=True), strict=True) package_name = package_build['package_name'] aggregate[package_name] = package_build @@ -234,22 +237,26 @@ def get_machine_os_content_digest(self) -> str: """ return self._build_meta['oscontainer']['digest'] - def find_non_latest_rpms(self) -> List[str]: + def find_non_latest_rpms(self) -> List[Tuple[str, str]]: """ If the packages installed in this image overlap packages in the candidate tag, return NVRs of the latest candidate builds that are not also installed in this image. This indicates that the image has not picked up the latest from candidate. Note that this is completely normal for non-STREAM assemblies. In fact, it is - normal for any assembly other than the assembly used for nightlies. 
In the age of
-        a thorough config:scan-sources, if this method returns anything, scan-sources is
-        likely broken.
+        normal for any assembly other than the assembly used for nightlies.
+
+        Unfortunately, rhcos builds are not performed in sync with all other builds.
+        Thus, it is natural for them to lag behind when RPMs change. They should catch
+        up with the next RHCOS build.
 
-        :return: Returns a list of NVRs from the "latest" state of the specified candidate tag
+        :return: Returns a list of Tuple[INSTALLED_NVRs, NEWEST_NVR] where
+        newest is from the "latest" state of the specified candidate tag
        if same the package installed into this archive is not the same NVR.
         """
-        candidate_brew_tag = self.runtime.get_default_candidate_brew_tag()
+        # Find the default candidate tag appropriate for the RHEL version used by this RHCOS build.
+        candidate_brew_tag = self.runtime.get_default_candidate_brew_tag(el_target=self.get_rhel_base_version())
 
         # N.B. the "rpms" installed in an image archive are individual RPMs, not brew rpm package builds.
         # we compare against the individual RPMs from latest candidate rpm package builds.
@@ -260,12 +267,16 @@ def find_non_latest_rpms(self) -> List[str]:
             bs_detector.find_unshipped_candidate_rpms(candidate_brew_tag, self.runtime.brew_event)
         }
 
-        old_nvrs = []
-        installed_package_builds: Dict[str, Dict] = self.get_package_build_objects()  # Maps package name to build dict.
+        old_nvrs: List[Tuple[str, str]] = []
+        # Translate the package builds into a list of individual RPMs. Build dict[rpm_name] -> nvr for every NVR installed
+        # in this RHCOS build.
+        installed_nvr_map: Dict[str, str] = {parse_nvr(installed_nvr)['name']: installed_nvr for installed_nvr in self.get_rpm_nvrs()}
         # we expect only a few unshipped candidates most of the the time, so we'll just search for those.
         for name, rpm in candidate_rpms.items():
-            build_dict = installed_package_builds.get(name)
-            if build_dict and rpm != build_dict['nvr']:
-                old_nvrs.append(build_dict['nvr'])
+            rpm_nvr = rpm['nvr']
+            if name in installed_nvr_map:
+                installed_nvr = installed_nvr_map[name]
+                if rpm_nvr != installed_nvr:
+                    old_nvrs.append((installed_nvr, rpm_nvr))
 
         return old_nvrs
diff --git a/doozerlib/runtime.py b/doozerlib/runtime.py
index 62b3f21af..64c99f6ab 100644
--- a/doozerlib/runtime.py
+++ b/doozerlib/runtime.py
@@ -25,6 +25,7 @@ import pathlib
 from typing import Optional, List, Dict, Tuple, Union
 import time
+import re
 
 from doozerlib import gitdata
 from . import logutil
@@ -723,7 +724,7 @@ def shared_build_status_detector(self) -> 'BuildStatusDetector':
         """
         with self.bs_lock:
             if self._build_status_detector is None:
-                self._build_status_detector = BuildStatusDetector(self.build_retrying_koji_client(), self.logger)
+                self._build_status_detector = BuildStatusDetector(self, self.logger)
             yield self._build_status_detector
 
     @contextmanager
@@ -1493,8 +1494,22 @@ def parallel_exec(self, f, args, n_threads=None):
         pool.join()
         return ret
 
-    def get_default_candidate_brew_tag(self):
-        return self.branch + '-candidate' if self.branch else None
+    def get_el_targeted_default_branch(self, el_target: Optional[Union[str, int]] = None):
+        if not self.branch:
+            return None
+        if not el_target:
+            return self.branch
+        # Otherwise, the caller is asking us to determine the branch for
+        # a specific RHEL version. Pull apart the default group branch
+        # and replace it with the targeted version.
+ el_ver: int = util.isolate_el_version_in_brew_tag(el_target) + match = re.match(r'^(.*)rhel-\d+(.*)$', self.branch) + el_specific_branch: str = f'{match.group(1)}rhel-{el_ver}{match.group(2)}' + return el_specific_branch + + def get_default_candidate_brew_tag(self, el_target: Optional[Union[str, int]] = None): + branch = self.get_el_targeted_default_branch(el_target=el_target) + return branch + '-candidate' if branch else None def get_candidate_brew_tags(self): """Return a set of known candidate tags relevant to this group""" diff --git a/tests/test_brew_inspector.py b/tests/test_brew_inspector.py index 707cd0b6f..bb3dba674 100644 --- a/tests/test_brew_inspector.py +++ b/tests/test_brew_inspector.py @@ -22,8 +22,7 @@ def __init__(self, logger): class TestBrewBuildImageInspector(unittest.TestCase): def setUp(self): - logging.basicConfig(level=logging.DEBUG) - self.logger = logging.getLogger() + self.logger = MagicMock(spec=logging.Logger) runtime = MockRuntime(self.logger) koji_mock = Mock() diff --git a/tests/test_build_status_detector.py b/tests/test_build_status_detector.py index 3bda6ba09..c70d98a6d 100644 --- a/tests/test_build_status_detector.py +++ b/tests/test_build_status_detector.py @@ -113,14 +113,15 @@ def test_find_shipped_builds(self): self.assertEqual(actual, expected) def test_find_unshipped_candidate_rpms(self): - latest = [dict(id=1), dict(id=2), dict(id=3)] + latest = [dict(id=1, package_name='p1'), dict(id=2, package_name='p2'), dict(id=3, package_name='p3')] shipped_ids = {2} rpms = [ # just need to get the same thing back, not inspect contents [object(), object()], [object()], ] - session = MagicMock() - detector = BuildStatusDetector(session, MagicMock()) + runtime = MagicMock() + detector = BuildStatusDetector(runtime, MagicMock()) + session = detector.koji_session session.getLatestBuilds.return_value = latest with patch.object(detector, 'find_shipped_builds') as find_shipped_builds, \ patch("doozerlib.brew.list_build_rpms") as list_build_rpms: @@ -140,6 +141,7 @@ def test_find_unshipped_candidate_rpms(self): def test_rpms_in_embargoed_tag(self): session = MagicMock() detector = BuildStatusDetector(session, MagicMock()) + session = detector.koji_session session.listTagged.return_value = [{"id": 42}] expected = {42} diff --git a/tests/test_rhcos.py b/tests/test_rhcos.py index dbec389c5..1a20cded3 100755 --- a/tests/test_rhcos.py +++ b/tests/test_rhcos.py @@ -31,8 +31,7 @@ def _urlopen_json_cm(mock_urlopen, content, rc=200): class TestRhcos(unittest.TestCase): def setUp(self): - logging.basicConfig(level=logging.DEBUG) - self.logger = logging.getLogger() + self.logger = MagicMock(spec=logging.Logger) runtime = MockRuntime(self.logger) koji_mock = Mock()
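
The "permits" mechanism introduced above is driven by the assembly definition in releases.yml. A rough, hypothetical sketch follows; the assembly names and surrounding keys are illustrative only, while the shape of the 'permits' list (a 'code' that must be '*' or an AssemblyIssueCode name other than IMPERMISSIBLE, and a 'component' that may be a distgit key, 'rhcos', 'reference-releases', or '*') is taken from assembly_permits() and AssemblyInspector.does_permit() in this diff:

    releases:
      4.8.3:                                     # hypothetical assembly name
        assembly:
          basis:
            assembly: 4.8.2                      # hypothetical parent assembly
          permits:
          - code: OUTDATED_RPMS_IN_STREAM_BUILD  # any AssemblyIssueCode name except IMPERMISSIBLE, or '*'
            component: rhcos
          - code: MISMATCHED_SIBLINGS
            component: '*'

With a stanza like this, release_gen_payload marks matching issues as "permitted: true" in its report and exits 0; any unpermitted issue, or any issue with code IMPERMISSIBLE (which can never be permitted), leaves the report marked "viable: false" and the command exits 1. STANDARD assemblies reject a permits section outright.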