From 236459c63f0ff02016a935406efeafd23c49dd35 Mon Sep 17 00:00:00 2001
From: Martin Basti
Date: Fri, 16 Aug 2024 12:57:11 +0200
Subject: [PATCH] fix(source-containers): handle all cachito metadata

Exporting all cachito metadata caused failures in source container builds,
because the fetch_sources plugin expected exactly 2 archives per remote
source.

STONEBLD-2655

Signed-off-by: Martin Basti
---
 atomic_reactor/plugins/fetch_sources.py | 71 ++++++++++++-------------
 tests/plugins/test_fetch_sources.py     | 36 +++++++------
 2 files changed, 53 insertions(+), 54 deletions(-)

diff --git a/atomic_reactor/plugins/fetch_sources.py b/atomic_reactor/plugins/fetch_sources.py
index a55acf75d..b7aea8deb 100644
--- a/atomic_reactor/plugins/fetch_sources.py
+++ b/atomic_reactor/plugins/fetch_sources.py
@@ -313,55 +313,52 @@ def _process_multiple_remote_sources(self, koji_build, archives, remote_sources_
         remote_json_map = {}
         remote_sources = koji_build['extra']['typeinfo']['remote-sources']
         wrong_archives = False
-        all_archives = []
+
+        koji_archives = {archive['filename'] for archive in archives}
+        metadata_archives = set()
 
         for remote_s in remote_sources:
             remote_archive = None
             remote_json = None
 
-            if len(remote_s['archives']) != 2:
-                self.log.error('remote source "%s" does not contain 2 archives, but "%s"',
-                               remote_s['name'], remote_s['archives'])
+            for archive in remote_s['archives']:
+                # ignore .env.json and .config.json
+                suffixes = Path(archive).suffixes
+                if suffixes == ['.json']:
+                    remote_json = archive
+                elif suffixes == ['.tar', '.gz']:
+                    remote_archive = archive
+
+            if not remote_json:
+                self.log.error('remote source json, for remote source "%s" not found '
+                               'in archives "%s"', remote_s['name'], remote_s['archives'])
                 wrong_archives = True
-            else:
-                for archive in remote_s['archives']:
-                    if archive.endswith('.json'):
-                        remote_json = archive
-                    else:
-                        remote_archive = archive
-
-                if not remote_json:
-                    self.log.error('remote source json, for remote source "%s" not found '
-                                   'in archives "%s"', remote_s['name'], remote_s['archives'])
-                    wrong_archives = True
-                else:
-                    remote_source = {}
-                    remote_source['url'] = os.path.join(remote_sources_path, remote_archive)
-                    remote_source['dest'] = '-'.join([koji_build['nvr'], remote_archive])
-                    remote_sources_urls.append(remote_source)
-                    cachito_json_url = os.path.join(remote_sources_path, remote_json)
-                    remote_json_map[remote_source['dest']] = cachito_json_url
-                    all_archives.append(remote_archive)
-                    all_archives.append(remote_json)
+
+            if not remote_archive:
+                self.log.error('remote source archive tar.gz, for remote source "%s" not found '
+                               'in archives "%s"', remote_s['name'], remote_s['archives'])
+                wrong_archives = True
+
+            if wrong_archives:
+                continue
+
+            remote_source = {}
+            remote_source['url'] = os.path.join(remote_sources_path, remote_archive)
+            remote_source['dest'] = '-'.join([koji_build['nvr'], remote_archive])
+            remote_sources_urls.append(remote_source)
+            cachito_json_url = os.path.join(remote_sources_path, remote_json)
+            remote_json_map[remote_source['dest']] = cachito_json_url
+            metadata_archives.add(remote_archive)
+            metadata_archives.add(remote_json)
 
         if wrong_archives:
             raise RuntimeError('Problems with archives in remote sources: {}'.
                                format(remote_sources))
 
-        extra_archives = []
-        for archive in archives:
-            if archive['filename'] in all_archives:
-                all_archives.remove(archive['filename'])
-            else:
-                extra_archives.append(archive['filename'])
-
-        if all_archives:
+        missing_archives = metadata_archives - koji_archives
+        if missing_archives:
             raise RuntimeError('Remote source files from metadata missing in koji '
-                               'archives: {}'.format(all_archives))
-
-        if extra_archives:
-            raise RuntimeError('Remote source archives in koji missing from '
-                               'metadata: {}'.format(extra_archives))
+                               'archives: {}'.format(sorted(missing_archives)))
 
         return remote_sources_urls, remote_json_map
 
diff --git a/tests/plugins/test_fetch_sources.py b/tests/plugins/test_fetch_sources.py
index 3a31532cc..4baf21b4f 100644
--- a/tests/plugins/test_fetch_sources.py
+++ b/tests/plugins/test_fetch_sources.py
@@ -40,17 +40,22 @@
 ALL_ARCHIVE_NAMES = ['remote-source-first.json', 'remote-source-first.tar.gz',
                      'remote-source-second.json', 'remote-source-second.tar.gz']
 RS_TYPEINFO = [{'name': 'first', 'url': 'first_url',
+                # compat with older builds, only 2 archives
                 'archives': ['remote-source-first.json', 'remote-source-first.tar.gz']},
                {'name': 'second', 'url': 'second_url',
-                'archives': ['remote-source-second.json', 'remote-source-second.tar.gz']}]
+                'archives': ['remote-source-second.json', 'remote-source-second.tar.gz',
+                             'remote-source-second.config.json', 'remote-source-second.env.json']}]
 RS_TYPEINFO_NO_JSON = [{'name': 'first', 'url': 'first_url',
                         'archives': ['remote-source-first.wrong', 'remote-source-first.tar.gz']},
                        {'name': 'second', 'url': 'second_url',
                         'archives': ['remote-source-second.bad', 'remote-source-second.tar.gz']}]
-RS_TYPEINFO_NO_2 = [{'name': 'first', 'url': 'first_url',
-                     'archives': ['remote-source-first.tar.gz']},
-                    {'name': 'second', 'url': 'second_url',
-                     'archives': ['remote-source-second.tar.gz']}]
+RS_TYPEINFO_NO_TAR_GZ = [{'name': 'first', 'url': 'first_url',
+                          'archives': ['remote-source-first.json', 'remote-source-first.config.json',
+                                       'remote-source-first.env.json']},
+                         {'name': 'second', 'url': 'second_url',
+                          'archives': ['remote-source-second.json',
+                                       'remote-source-second.config.json',
+                                       'remote-source-second.env.json']}]
 
 KOJI_BUILD_GO_RPMS = {'build_id': 100, 'nvr': 'go_image-1-1',
                       'name': 'go_image', 'version': 1, 'release': 1,
@@ -733,7 +738,9 @@ def test_go_sources(self, requests_mock, koji_session, workflow, source_dir):
             with open(os.path.join(sources_dir, f"{rpm['nvr']}.src.rpm"), 'rb') as f:
                 assert f.read() == b'Source RPM'
 
-    @pytest.mark.parametrize('typeinfo_rs', (RS_TYPEINFO, RS_TYPEINFO_NO_JSON, RS_TYPEINFO_NO_2))
+    @pytest.mark.parametrize('typeinfo_rs', (
+        RS_TYPEINFO, RS_TYPEINFO_NO_JSON, RS_TYPEINFO_NO_TAR_GZ
+    ))
     @pytest.mark.parametrize('archives_in_koji', (4, 3, 5))
     def test_fetch_sources_multiple_remote_sources(self, typeinfo_rs, archives_in_koji,
                                                    workflow, source_dir, caplog,
@@ -744,7 +751,7 @@ def test_fetch_sources_multiple_remote_sources(self, typeinfo_rs, archives_in_ko
         extra_archive = None
         all_archives = deepcopy(ALL_ARCHIVE_NAMES)
         should_fail = True
-        if typeinfo_rs == RS_TYPEINFO and archives_in_koji == 4:
+        if typeinfo_rs == RS_TYPEINFO and archives_in_koji >= 4:
             should_fail = False
 
         if archives_in_koji == 3:
@@ -770,22 +777,17 @@ def test_fetch_sources_multiple_remote_sources(self, typeinfo_rs, archives_in_ko
         exc_message = ""
         caplog_message = ""
 
-        if typeinfo_rs == RS_TYPEINFO_NO_2:
+        if typeinfo_rs == RS_TYPEINFO_NO_TAR_GZ:
             exc_message = 'Problems with archives in remote sources: {}'.format(typeinfo_rs)
-            caplog_message = ' does not contain 2 archives, but '
+            caplog_message = 'remote source archive tar.gz, for remote source '
 
         elif typeinfo_rs == RS_TYPEINFO_NO_JSON:
             exc_message = 'Problems with archives in remote sources: {}'.format(typeinfo_rs)
             caplog_message = 'remote source json, for remote source '
 
-        else:
-            if archives_in_koji == 5:
-                exc_message = 'Remote source archives in koji missing from ' \
-                              'metadata: {}'.format([extra_archive])
-
-            elif archives_in_koji == 3:
-                exc_message = 'Remote source files from metadata missing in koji ' \
-                              'archives: {}'.format([missing_archive])
+        elif archives_in_koji == 3:
+            exc_message = 'Remote source files from metadata missing in koji ' \
+                          'archives: {}'.format([missing_archive])
 
         if should_fail:
             with pytest.raises(PluginFailedException) as exc:
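
Note (not part of the patch): the suffix handling added in fetch_sources.py can
be exercised on its own. The sketch below is only an illustration; the
classify() helper does not exist in atomic-reactor, but the Path(...).suffixes
checks mirror the ones introduced above, keeping the Cachito remote-source
.json metadata and the .tar.gz source archive while ignoring the extra
.env.json and .config.json files now exported for each remote source:

    from pathlib import Path

    # Illustrative helper (hypothetical, not in the patch): classify one
    # remote-source archive name the same way the new plugin code does.
    def classify(filename):
        suffixes = Path(filename).suffixes
        if suffixes == ['.json']:        # remote-source-<name>.json metadata
            return 'json'
        if suffixes == ['.tar', '.gz']:  # remote-source-<name>.tar.gz sources
            return 'archive'
        return 'ignored'                 # e.g. .env.json, .config.json

    for name in ('remote-source-second.json',
                 'remote-source-second.tar.gz',
                 'remote-source-second.config.json',
                 'remote-source-second.env.json'):
        print(name, '->', classify(name))
    # remote-source-second.json -> json
    # remote-source-second.tar.gz -> archive
    # remote-source-second.config.json -> ignored
    # remote-source-second.env.json -> ignored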