From 6efaae19fd87f6b34d2a066292f41a56c40a603d Mon Sep 17 00:00:00 2001
From: Lubomír Sedlář
Date: Jul 24 2019 11:36:23 +0000
Subject: Remove debug mode

This was already discouraged from being used, and it is a bad idea in the
current setup anyway. Removing it simplifies the code.

Signed-off-by: Lubomír Sedlář

---

diff --git a/bin/pungi-koji b/bin/pungi-koji
index debf455..d592ed0 100755
--- a/bin/pungi-koji
+++ b/bin/pungi-koji
@@ -77,12 +77,6 @@ def main():
         help="Path to directory with old composes. Reuse an existing repodata from the most recent compose.",
     )
     parser.add_argument(
-        "--debug-mode",
-        action="store_true",
-        default=False,
-        help="run pungi in DEBUG mode (DANGEROUS!)",
-    )
-    parser.add_argument(
         "--config",
         help="Config file",
         required=True
@@ -252,7 +246,6 @@ def main():
 
     compose = Compose(conf,
                       topdir=compose_dir,
-                      debug=opts.debug_mode,
                       skip_phases=opts.skip_phase,
                       just_phases=opts.just_phase,
                       old_composes=opts.old_composes,
diff --git a/pungi/compose.py b/pungi/compose.py
index abfcb81..4bb7703 100644
--- a/pungi/compose.py
+++ b/pungi/compose.py
@@ -106,7 +106,7 @@ def get_compose_dir(topdir, conf, compose_type="production", compose_date=None,
 
 
 class Compose(kobo.log.LoggingBase):
-    def __init__(self, conf, topdir, debug=False, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None, notifier=None):
+    def __init__(self, conf, topdir, skip_phases=None, just_phases=None, old_composes=None, koji_event=None, supported=False, logger=None, notifier=None):
         kobo.log.LoggingBase.__init__(self, logger)
         # TODO: check if minimal conf values are set
         self.conf = conf
@@ -122,9 +122,6 @@ class Compose(kobo.log.LoggingBase):
         self.koji_event = koji_event or conf.get("koji_event")
         self.notifier = notifier
 
-        # intentionally upper-case (visible in the code)
-        self.DEBUG = debug
-
         # path definitions
         self.paths = Paths(self)
 
@@ -142,17 +139,6 @@ class Compose(kobo.log.LoggingBase):
             self.supported = True
 
         self.im = Images()
-        if self.DEBUG:
-            try:
-                self.im.load(self.paths.compose.metadata("images.json"))
-            except RuntimeError:
-                pass
-            # images.json doesn't exists
-            except IOError:
-                pass
-            # images.json is not a valid json file, for example, it's an empty file
-            except ValueError:
-                pass
         self.im.compose.id = self.compose_id
         self.im.compose.type = self.compose_type
         self.im.compose.date = self.compose_date
@@ -235,23 +221,22 @@ class Compose(kobo.log.LoggingBase):
         variants_file = self.paths.work.variants_file(arch="global")
         msg = "Writing variants file: %s" % variants_file
 
-        if self.DEBUG and os.path.isfile(variants_file):
-            self.log_warning("[SKIP ] %s" % msg)
+        scm_dict = self.conf["variants_file"]
+        if isinstance(scm_dict, dict):
+            file_name = os.path.basename(scm_dict["file"])
+            if scm_dict["scm"] == "file":
+                scm_dict["file"] = os.path.join(
+                    self.config_dir, os.path.basename(scm_dict["file"])
+                )
         else:
-            scm_dict = self.conf["variants_file"]
-            if isinstance(scm_dict, dict):
-                file_name = os.path.basename(scm_dict["file"])
-                if scm_dict["scm"] == "file":
-                    scm_dict["file"] = os.path.join(self.config_dir, os.path.basename(scm_dict["file"]))
-            else:
-                file_name = os.path.basename(scm_dict)
-                scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
-
-            self.log_debug(msg)
-            tmp_dir = self.mkdtemp(prefix="variants_file_")
-            get_file_from_scm(scm_dict, tmp_dir, logger=self._logger)
-            shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
-            shutil.rmtree(tmp_dir)
+            file_name = os.path.basename(scm_dict)
+            scm_dict = os.path.join(self.config_dir, os.path.basename(scm_dict))
+
+        self.log_debug(msg)
+        tmp_dir = self.mkdtemp(prefix="variants_file_")
+        get_file_from_scm(scm_dict, tmp_dir, logger=self._logger)
+        shutil.copy2(os.path.join(tmp_dir, file_name), variants_file)
+        shutil.rmtree(tmp_dir)
 
         tree_arches = self.conf.get("tree_arches", None)
         tree_variants = self.conf.get("tree_variants", None)
diff --git a/pungi/phases/createrepo.py b/pungi/phases/createrepo.py
index 060ba82..0af51cc 100644
--- a/pungi/phases/createrepo.py
+++ b/pungi/phases/createrepo.py
@@ -122,10 +122,6 @@ def create_variant_repo(compose, arch, variant, pkg_type, modules_metadata=None)
             return
         createrepo_dirs.add(repo_dir)
 
-    if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")):
-        compose.log_warning("[SKIP ] %s" % msg)
-        return
-
     compose.log_info("[BEGIN] %s" % msg)
 
     # We only want delta RPMs for binary repos.
diff --git a/pungi/phases/gather/__init__.py b/pungi/phases/gather/__init__.py
index d7438a3..c10f4c8 100644
--- a/pungi/phases/gather/__init__.py
+++ b/pungi/phases/gather/__init__.py
@@ -91,13 +91,8 @@ class GatherPhase(PhaseBase):
             raise ValueError('\n'.join(errors))
 
     def _write_manifest(self):
-        if self.compose.DEBUG and os.path.isfile(self.manifest_file):
-            self.compose.log_info(
-                "Skipping writing RPM manifest, already exists: %s" % self.manifest_file
-            )
-        else:
-            self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file)
-            self.manifest.dump(self.manifest_file)
+        self.compose.log_info("Writing RPM manifest: %s" % self.manifest_file)
+        self.manifest.dump(self.manifest_file)
 
     def run(self):
         pkg_map = gather_wrapper(self.compose, self.pkgset_phase.package_sets,
@@ -517,23 +512,20 @@ def write_prepopulate_file(compose):
     prepopulate_file = os.path.join(compose.paths.work.topdir(arch="global"), "prepopulate.json")
     msg = "Writing prepopulate file: %s" % prepopulate_file
 
-    if compose.DEBUG and os.path.isfile(prepopulate_file):
-        compose.log_warning("[SKIP ] %s" % msg)
+    scm_dict = compose.conf["gather_prepopulate"]
+    if isinstance(scm_dict, dict):
+        file_name = os.path.basename(scm_dict["file"])
+        if scm_dict["scm"] == "file":
+            scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"]))
     else:
-        scm_dict = compose.conf["gather_prepopulate"]
-        if isinstance(scm_dict, dict):
-            file_name = os.path.basename(scm_dict["file"])
-            if scm_dict["scm"] == "file":
-                scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"]))
-        else:
-            file_name = os.path.basename(scm_dict)
-            scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict))
-
-        compose.log_debug(msg)
-        tmp_dir = compose.mkdtemp(prefix="prepopulate_file_")
-        get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
-        shutil.copy2(os.path.join(tmp_dir, file_name), prepopulate_file)
-        shutil.rmtree(tmp_dir)
+        file_name = os.path.basename(scm_dict)
+        scm_dict = os.path.join(compose.config_dir, os.path.basename(scm_dict))
+
+    compose.log_debug(msg)
+    tmp_dir = compose.mkdtemp(prefix="prepopulate_file_")
+    get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
+    shutil.copy2(os.path.join(tmp_dir, file_name), prepopulate_file)
+    shutil.rmtree(tmp_dir)
 
 
 def get_prepopulate_packages(compose, arch, variant, include_arch=True):
diff --git a/pungi/phases/gather/methods/method_deps.py b/pungi/phases/gather/methods/method_deps.py
index e82c907..618aa3b 100644
--- a/pungi/phases/gather/methods/method_deps.py
+++ b/pungi/phases/gather/methods/method_deps.py
@@ -89,10 +89,6 @@ def write_pungi_config(compose, arch, variant, packages, groups, filter_packages
     pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch, source_name=source_name)
     msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)
 
-    if compose.DEBUG and os.path.isfile(pungi_cfg):
-        compose.log_warning("[SKIP ] %s" % msg)
-        return
-
     compose.log_info(msg)
 
     repos = {
@@ -160,11 +156,6 @@ def resolve_deps(compose, arch, variant, source_name=None):
     pungi_log = compose.paths.work.pungi_log(arch, variant, source_name=source_name)
 
     msg = "Running pungi (arch: %s, variant: %s)" % (arch, variant)
-    if compose.DEBUG and os.path.exists(pungi_log):
-        compose.log_warning("[SKIP ] %s" % msg)
-        with open(pungi_log, "r") as f:
-            res, broken_deps, _ = pungi_wrapper.parse_log(f)
-        return res, broken_deps
 
     compose.log_info("[BEGIN] %s" % msg)
     pungi_conf = compose.paths.work.pungi_conf(arch, variant, source_name=source_name)
diff --git a/pungi/phases/init.py b/pungi/phases/init.py
index d06a927..f41da5f 100644
--- a/pungi/phases/init.py
+++ b/pungi/phases/init.py
@@ -90,23 +90,20 @@ def write_global_comps(compose):
     comps_file_global = compose.paths.work.comps(arch="global")
     msg = "Writing global comps file: %s" % comps_file_global
 
-    if compose.DEBUG and os.path.isfile(comps_file_global):
-        compose.log_warning("[SKIP ] %s" % msg)
+    scm_dict = compose.conf["comps_file"]
+    if isinstance(scm_dict, dict):
+        comps_name = os.path.basename(scm_dict["file"])
+        if scm_dict["scm"] == "file":
+            scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"])
     else:
-        scm_dict = compose.conf["comps_file"]
-        if isinstance(scm_dict, dict):
-            comps_name = os.path.basename(scm_dict["file"])
-            if scm_dict["scm"] == "file":
-                scm_dict["file"] = os.path.join(compose.config_dir, scm_dict["file"])
-        else:
-            comps_name = os.path.basename(scm_dict)
-            scm_dict = os.path.join(compose.config_dir, scm_dict)
-
-        compose.log_debug(msg)
-        tmp_dir = compose.mkdtemp(prefix="comps_")
-        get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
-        shutil.copy2(os.path.join(tmp_dir, comps_name), comps_file_global)
-        shutil.rmtree(tmp_dir)
+        comps_name = os.path.basename(scm_dict)
+        scm_dict = os.path.join(compose.config_dir, scm_dict)
+
+    compose.log_debug(msg)
+    tmp_dir = compose.mkdtemp(prefix="comps_")
+    get_file_from_scm(scm_dict, tmp_dir, logger=compose._logger)
+    shutil.copy2(os.path.join(tmp_dir, comps_name), comps_file_global)
+    shutil.rmtree(tmp_dir)
 
     return comps_file_global
 
@@ -115,10 +112,6 @@ def write_arch_comps(compose, arch):
     comps_file_arch = compose.paths.work.comps(arch=arch)
     msg = "Writing comps file for arch '%s': %s" % (arch, comps_file_arch)
 
-    if compose.DEBUG and os.path.isfile(comps_file_arch):
-        compose.log_warning("[SKIP ] %s" % msg)
-        return
-
     compose.log_debug(msg)
     run(["comps_filter", "--arch=%s" % arch, "--no-cleanup",
          "--output=%s" % comps_file_arch,
@@ -145,17 +138,6 @@ def write_variant_comps(compose, arch, variant):
     comps_file = compose.paths.work.comps(arch=arch, variant=variant)
     msg = "Writing comps file (arch: %s, variant: %s): %s" % (arch, variant, comps_file)
 
-    if compose.DEBUG and os.path.isfile(comps_file):
-        # read display_order and groups for environments (needed for live images)
-        comps = CompsWrapper(comps_file)
-        # groups = variant.groups
-        comps.filter_groups(variant.groups)
-        if compose.conf["comps_filter_environments"]:
-            comps.filter_environments(variant.environments)
-
-        compose.log_warning("[SKIP ] %s" % msg)
-        return
-
     compose.log_debug(msg)
     cmd = [
         "comps_filter",
@@ -194,17 +176,17 @@ def create_comps_repo(compose, arch, variant):
     comps_repo = compose.paths.work.comps_repo(arch=arch, variant=variant)
     comps_path = compose.paths.work.comps(arch=arch, variant=variant)
     msg = "Creating comps repo for arch '%s' variant '%s'" % (arch, variant.uid if variant else None)
-    if compose.DEBUG and os.path.isdir(os.path.join(comps_repo, "repodata")):
-        compose.log_warning("[SKIP ] %s" % msg)
-    else:
-        compose.log_info("[BEGIN] %s" % msg)
-        cmd = repo.get_createrepo_cmd(comps_repo, database=False,
-                                      outputdir=comps_repo, groupfile=comps_path,
-                                      checksum=createrepo_checksum)
-        logfile = 'comps_repo-%s' % variant if variant else 'comps_repo'
-        run(cmd, logfile=compose.paths.log.log_file(arch, logfile),
-            show_cmd=True)
-        compose.log_info("[DONE ] %s" % msg)
+
+    compose.log_info("[BEGIN] %s" % msg)
+    cmd = repo.get_createrepo_cmd(
+        comps_repo, database=False,
+        outputdir=comps_repo,
+        groupfile=comps_path,
+        checksum=createrepo_checksum,
+    )
+    logfile = "comps_repo-%s" % variant if variant else "comps_repo"
+    run(cmd, logfile=compose.paths.log.log_file(arch, logfile), show_cmd=True)
+    compose.log_info("[DONE ] %s" % msg)
 
 
 def write_module_defaults(compose):
diff --git a/pungi/phases/pkgset/common.py b/pungi/phases/pkgset/common.py
index 553b605..84aff99 100644
--- a/pungi/phases/pkgset/common.py
+++ b/pungi/phases/pkgset/common.py
@@ -48,10 +48,6 @@ def get_create_global_repo_cmd(compose, path_prefix):
     repo = CreaterepoWrapper(createrepo_c=createrepo_c)
     repo_dir_global = compose.paths.work.arch_repo(arch="global")
 
-    if compose.DEBUG and os.path.isdir(os.path.join(repo_dir_global, "repodata")):
-        compose.log_warning("[SKIP ] Running createrepo for the global package set")
-        return
-
     # find an old compose suitable for repodata reuse
     old_compose_path = None
     update_md_path = None
@@ -108,10 +104,6 @@ def _create_arch_repo(worker_thread, args, task_num):
     repo_dir = compose.paths.work.arch_repo(arch=arch)
     msg = "Running createrepo for arch '%s'" % arch
 
-    if compose.DEBUG and os.path.isdir(os.path.join(repo_dir, "repodata")):
-        compose.log_warning("[SKIP ] %s" % msg)
-        return
-
     compose.log_info("[BEGIN] %s" % msg)
     cmd = repo.get_createrepo_cmd(path_prefix, update=True, database=False, skip_stat=True,
                                   pkglist=compose.paths.work.package_list(arch=arch), outputdir=repo_dir,
diff --git a/pungi/phases/pkgset/sources/source_koji.py b/pungi/phases/pkgset/sources/source_koji.py
index 6564963..51a8f25 100644
--- a/pungi/phases/pkgset/sources/source_koji.py
+++ b/pungi/phases/pkgset/sources/source_koji.py
@@ -600,104 +600,103 @@ def populate_global_pkgset(compose, koji_wrapper, path_prefix, event):
     inherit = compose.conf["pkgset_koji_inherit"]
     inherit_modules = compose.conf["pkgset_koji_inherit_modules"]
-    global_pkgset_path = os.path.join(
-        compose.paths.work.topdir(arch="global"), "pkgset_global.pickle")
-    if compose.DEBUG and os.path.isfile(global_pkgset_path):
-        msg = "Populating the global package set from tag '%s'" % compose_tags
-        compose.log_warning("[SKIP ] %s" % msg)
-        with open(global_pkgset_path, "rb") as f:
-            global_pkgset = pickle.load(f)
-    else:
-        global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
+
+    global_pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
+        koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
+        arches=all_arches)
+
+    old_file_cache_path = _find_old_file_cache_path(compose)
+    old_file_cache = None
+    if old_file_cache_path:
+        compose.log_info("Reusing old PKGSET file cache from %s" % old_file_cache_path)
+        old_file_cache = pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache(
+            old_file_cache_path
+        )
+        global_pkgset.set_old_file_cache(old_file_cache)
+
+    # Get package set for each compose tag and merge it to global package
+    # list. Also prepare per-variant pkgset, because we do not have list
+    # of binary RPMs in module definition - there is just list of SRPMs.
+    for compose_tag in compose_tags:
+        compose.log_info(
+            "Populating the global package set from tag '%s'" % compose_tag
+        )
+        if compose_tag in pkgset_koji_tags:
+            extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
+        else:
+            extra_builds = []
+        pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
             koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
-            arches=all_arches)
+            arches=all_arches, packages=packages_to_gather,
+            allow_invalid_sigkeys=allow_invalid_sigkeys,
+            populate_only_packages=populate_only_packages_to_gather,
+            cache_region=compose.cache_region,
+            extra_builds=extra_builds)
+        if old_file_cache:
+            pkgset.set_old_file_cache(old_file_cache)
+        # Create a filename for log with package-to-tag mapping. The tag
+        # name is included in filename, so any slashes in it are replaced
+        # with underscores just to be safe.
+        logfile = compose.paths.log.log_file(
+            None, "packages_from_%s" % compose_tag.replace("/", "_")
+        )
+        is_traditional = compose_tag in compose.conf.get("pkgset_koji_tag", [])
+        should_inherit = inherit if is_traditional else inherit_modules
+
+        # If we're processing a modular tag, we have an exact list of
+        # packages that will be used. This is basically a workaround for
+        # tagging working on build level, not rpm level. A module tag may
+        # build a package but not want it included. This should include
+        # only packages that are actually in modules. It's possible two
+        # module builds will use the same tag, particularly a -devel module
+        # is sharing a tag with its regular version.
+        # The ultimate goal of the mapping is to avoid a package built in modular
+        # tag to be used as a dependency of some non-modular package.
+        modular_packages = set()
+        for variant in compose.all_variants.values():
+            for nsvc, modular_tag in variant.module_uid_to_koji_tag.items():
+                if modular_tag != compose_tag:
+                    # Not current tag, skip it
+                    continue
+                for arch_modules in variant.arch_mmds.values():
+                    for rpm_nevra in arch_modules[nsvc].get_rpm_artifacts():
+                        nevra = parse_nvra(rpm_nevra)
+                        modular_packages.add((nevra["name"], nevra["arch"]))
+
+        pkgset.populate(
+            compose_tag,
+            event,
+            inherit=should_inherit,
+            logfile=logfile,
+            include_packages=modular_packages,
+        )
+        for variant in compose.all_variants.values():
+            if compose_tag in variant_tags[variant]:
+
+                # If it's a modular tag, store the package set for the module.
+                for nsvc, koji_tag in variant.module_uid_to_koji_tag.items():
+                    if compose_tag == koji_tag:
+                        variant.nsvc_to_pkgset[nsvc] = pkgset
+
+                # Optimization for case where we have just single compose
+                # tag - we do not have to merge in this case...
+                if len(variant_tags[variant]) == 1:
+                    variant.pkgset = pkgset
+                else:
+                    variant.pkgset.fast_merge(pkgset)
+        # Optimization for case where we have just single compose
+        # tag - we do not have to merge in this case...
+        if len(compose_tags) == 1:
+            global_pkgset = pkgset
+        else:
+            global_pkgset.fast_merge(pkgset)
 
-        old_file_cache_path = _find_old_file_cache_path(compose)
-        old_file_cache = None
-        if old_file_cache_path:
-            compose.log_info("Reusing old PKGSET file cache from %s" % old_file_cache_path)
-            old_file_cache = pungi.phases.pkgset.pkgsets.KojiPackageSet.load_old_file_cache(
-                old_file_cache_path
-            )
-            global_pkgset.set_old_file_cache(old_file_cache)
-
-        # Get package set for each compose tag and merge it to global package
-        # list. Also prepare per-variant pkgset, because we do not have list
-        # of binary RPMs in module definition - there is just list of SRPMs.
-        for compose_tag in compose_tags:
-            compose.log_info("Populating the global package set from tag "
-                             "'%s'" % compose_tag)
-            if compose_tag in pkgset_koji_tags:
-                extra_builds = force_list(compose.conf.get("pkgset_koji_builds", []))
-            else:
-                extra_builds = []
-            pkgset = pungi.phases.pkgset.pkgsets.KojiPackageSet(
-                koji_wrapper, compose.conf["sigkeys"], logger=compose._logger,
-                arches=all_arches, packages=packages_to_gather,
-                allow_invalid_sigkeys=allow_invalid_sigkeys,
-                populate_only_packages=populate_only_packages_to_gather,
-                cache_region=compose.cache_region,
-                extra_builds=extra_builds)
-            if old_file_cache:
-                pkgset.set_old_file_cache(old_file_cache)
-            # Create a filename for log with package-to-tag mapping. The tag
-            # name is included in filename, so any slashes in it are replaced
-            # with underscores just to be safe.
-            logfile = compose.paths.log.log_file(
-                None, 'packages_from_%s' % compose_tag.replace('/', '_'))
-            is_traditional = compose_tag in compose.conf.get('pkgset_koji_tag', [])
-            should_inherit = inherit if is_traditional else inherit_modules
-
-            # If we're processing a modular tag, we have an exact list of
-            # packages that will be used. This is basically a workaround for
-            # tagging working on build level, not rpm level. A module tag may
-            # build a package but not want it included. This should include
-            # only packages that are actually in modules. It's possible two
-            # module builds will use the same tag, particularly a -devel module
-            # is sharing a tag with its regular version.
-            # The ultimate goal of the mapping is to avoid a package built in modular
-            # tag to be used as a dependency of some non-modular package.
-            modular_packages = set()
-            for variant in compose.all_variants.values():
-                for nsvc, modular_tag in variant.module_uid_to_koji_tag.items():
-                    if modular_tag != compose_tag:
-                        # Not current tag, skip it
-                        continue
-                    for arch_modules in variant.arch_mmds.values():
-                        for rpm_nevra in arch_modules[nsvc].get_rpm_artifacts():
-                            nevra = parse_nvra(rpm_nevra)
-                            modular_packages.add((nevra["name"], nevra["arch"]))
-
-            pkgset.populate(
-                compose_tag,
-                event,
-                inherit=should_inherit,
-                logfile=logfile,
-                include_packages=modular_packages,
-            )
-            for variant in compose.all_variants.values():
-                if compose_tag in variant_tags[variant]:
-
-                    # If it's a modular tag, store the package set for the module.
-                    for nsvc, koji_tag in variant.module_uid_to_koji_tag.items():
-                        if compose_tag == koji_tag:
-                            variant.nsvc_to_pkgset[nsvc] = pkgset
-
-                    # Optimization for case where we have just single compose
-                    # tag - we do not have to merge in this case...
-                    if len(variant_tags[variant]) == 1:
-                        variant.pkgset = pkgset
-                    else:
-                        variant.pkgset.fast_merge(pkgset)
-            # Optimization for case where we have just single compose
-            # tag - we do not have to merge in this case...
-            if len(compose_tags) == 1:
-                global_pkgset = pkgset
-            else:
-                global_pkgset.fast_merge(pkgset)
-        with open(global_pkgset_path, 'wb') as f:
-            data = pickle.dumps(global_pkgset, protocol=pickle.HIGHEST_PROTOCOL)
-            f.write(data)
+    global_pkgset_path = os.path.join(
+        compose.paths.work.topdir(arch="global"), "pkgset_global.pickle"
+    )
+    with open(global_pkgset_path, "wb") as f:
+        data = pickle.dumps(global_pkgset, protocol=pickle.HIGHEST_PROTOCOL)
+        f.write(data)
 
     # write global package list
     global_pkgset.save_file_list(
@@ -711,16 +710,12 @@ def get_koji_event_info(compose, koji_wrapper):
     event_file = os.path.join(compose.paths.work.topdir(arch="global"), "koji-event")
 
     msg = "Getting koji event"
-    if compose.DEBUG and os.path.exists(event_file):
-        compose.log_warning("[SKIP ] %s" % msg)
-        result = json.load(open(event_file, "r"))
+    result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file)
+    if compose.koji_event:
+        compose.log_info("Setting koji event to a custom value: %s" % compose.koji_event)
     else:
-        result = get_koji_event_raw(koji_wrapper, compose.koji_event, event_file)
-        if compose.koji_event:
-            compose.log_info("Setting koji event to a custom value: %s" % compose.koji_event)
-        else:
-            compose.log_info(msg)
-            compose.log_info("Koji event: %s" % result["id"])
+        compose.log_info(msg)
+        compose.log_info("Koji event: %s" % result["id"])
 
     return result
diff --git a/pungi/phases/pkgset/sources/source_repos.py b/pungi/phases/pkgset/sources/source_repos.py
index 2a491e4..06014b1 100644
--- a/pungi/phases/pkgset/sources/source_repos.py
+++ b/pungi/phases/pkgset/sources/source_repos.py
@@ -140,16 +140,14 @@ def populate_global_pkgset(compose, file_list, path_prefix):
 
     msg = "Populating the global package set from a file list"
     global_pkgset_path = os.path.join(compose.paths.work.topdir(arch="global"), "packages.pickle")
-    if compose.DEBUG and os.path.isfile(global_pkgset_path):
-        compose.log_warning("[SKIP ] %s" % msg)
-        with open(global_pkgset_path, "rb") as f:
-            pkgset = pickle.load(f)
-    else:
-        compose.log_info(msg)
-        pkgset = pungi.phases.pkgset.pkgsets.FilelistPackageSet(compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES)
-        pkgset.populate(file_list)
-        with open(global_pkgset_path, "wb") as f:
-            pickle.dump(pkgset, f, protocol=pickle.HIGHEST_PROTOCOL)
+
+    compose.log_info(msg)
+    pkgset = pungi.phases.pkgset.pkgsets.FilelistPackageSet(
+        compose.conf["sigkeys"], logger=compose._logger, arches=ALL_ARCHES
+    )
+    pkgset.populate(file_list)
+    with open(global_pkgset_path, "wb") as f:
+        pickle.dump(pkgset, f, protocol=pickle.HIGHEST_PROTOCOL)
 
     # write global package list
     pkgset.save_file_list(compose.paths.work.package_list(arch="global"), remove_path_prefix=path_prefix)
@@ -162,10 +160,6 @@ def write_pungi_config(compose, arch, variant, repos=None, comps_repo=None, pack
     pungi_cfg = compose.paths.work.pungi_conf(variant=variant, arch=arch)
    msg = "Writing pungi config (arch: %s, variant: %s): %s" % (arch, variant, pungi_cfg)
 
-    if compose.DEBUG and os.path.isfile(pungi_cfg):
-        compose.log_warning("[SKIP ] %s" % msg)
-        return
-
     compose.log_info(msg)
 
     packages, grps = get_packages_to_gather(compose, arch, variant)
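
For reviewers skimming the diff: every hunk above deletes the same recurring guard. The sketch below condenses that pattern; write_phase_output and the "regenerate" placeholder are hypothetical stand-ins, while compose.DEBUG, log_warning and the "[SKIP ] " prefix are taken from the removed lines.

import os


def write_phase_output(compose, output_path, msg):
    """Condensed, illustrative sketch of the guard this patch removes."""
    if compose.DEBUG and os.path.isfile(output_path):
        # Old debug mode: reuse whatever already exists on disk, even a
        # stale or half-written artifact from a previous run.
        compose.log_warning("[SKIP ] %s" % msg)
        return
    # After this patch only this branch remains: the output is always
    # regenerated, so phases never pick up leftover files.
    compose.log_info(msg)
    # ... regenerate output_path here ...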