From e3d6637e1cd96b863619a5ba18d9c72545a3db3e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Lipovsk=C3=BD?= Date: Tue, 25 Nov 2025 14:36:48 +0100 Subject: [PATCH 1/3] Handling of fbc-operations for containerized IIB Assisted by: Gemini, Claude [CLOUDDST-28644] --- .flake8 | 1 + iib/workers/config.py | 1 + .../build_containerized_fbc_operations.py | 332 ++++++++++++ iib/workers/tasks/containerized_utils.py | 104 +++- iib/workers/tasks/opm_operations.py | 105 +++- iib/workers/tasks/utils.py | 3 + ...test_build_containerized_fbc_operations.py | 506 ++++++++++++++++++ .../test_tasks/test_konflux_utils.py | 111 ++++ 8 files changed, 1161 insertions(+), 2 deletions(-) create mode 100644 iib/workers/tasks/build_containerized_fbc_operations.py create mode 100644 tests/test_workers/test_tasks/test_build_containerized_fbc_operations.py diff --git a/.flake8 b/.flake8 index b4e456a5a..547c8565b 100644 --- a/.flake8 +++ b/.flake8 @@ -16,6 +16,7 @@ per-file-ignores = ./tests/test_web/test_s3_utils.py: D103 ./tests/test_web/test_api_v1.py: D103,F541 ./tests/test_workers/test_tasks/test_build.py: D103,E231 + ./tests/test_workers/test_tasks/test_build_containerized_fbc_operations.py: F841,E501 ./tests/test_workers/test_tasks/test_build_regenerate_bundle.py: D103,E241,E222 ./tests/test_workers/test_tasks/test_opm_operations.py: D103, E203 ./tests/test_web/test_migrations.py: E231,D103 diff --git a/iib/workers/config.py b/iib/workers/config.py index 4aad33e1a..c013e9542 100644 --- a/iib/workers/config.py +++ b/iib/workers/config.py @@ -94,6 +94,7 @@ class Config(object): 'iib.workers.tasks.build_create_empty_index', 'iib.workers.tasks.build_fbc_operations', 'iib.workers.tasks.build_add_deprecations', + 'iib.workers.tasks.build_containerized_fbc_operations', 'iib.workers.tasks.general', ] # Path to hidden location of SQLite database diff --git a/iib/workers/tasks/build_containerized_fbc_operations.py b/iib/workers/tasks/build_containerized_fbc_operations.py new file mode 100644 index 
# SPDX-License-Identifier: GPL-3.0-or-later
import logging
import os
import tempfile
from typing import Dict, List, Optional, Set

from iib.common.common_utils import get_binary_versions
from iib.common.tracing import instrument_tracing
from iib.exceptions import IIBError
from iib.workers.api_utils import set_request_state
from iib.workers.tasks.build import (
    _update_index_image_build_state,
    _update_index_image_pull_spec,
    _skopeo_copy,
)
from iib.workers.tasks.celery import app
from iib.workers.tasks.containerized_utils import (
    pull_index_db_artifact,
    write_build_metadata,
    get_list_of_output_pullspec,
    cleanup_on_failure,
    push_index_db_artifact,
)
from iib.workers.tasks.git_utils import (
    create_mr,
    clone_git_repo,
    get_git_token,
    get_last_commit_sha,
    resolve_git_url,
    commit_and_push,
    close_mr,
)
from iib.workers.tasks.konflux_utils import (
    wait_for_pipeline_completion,
    find_pipelinerun,
    get_pipelinerun_image_url,
)
from iib.workers.tasks.opm_operations import (
    Opm,
    opm_registry_add_fbc_fragment_containerized,
)
from iib.workers.tasks.utils import (
    get_resolved_image,
    prepare_request_for_build,
    request_logger,
    set_registry_token,
    RequestConfigFBCOperation,
    reset_docker_config,
)

__all__ = ['handle_containerized_fbc_operation_request']

log = logging.getLogger(__name__)


@app.task
@request_logger
@instrument_tracing(
    span_name="workers.tasks.build.handle_containerized_fbc_operation_request",
    attributes=get_binary_versions(),
)
def handle_containerized_fbc_operation_request(
    request_id: int,
    fbc_fragments: List[str],
    from_index: str,
    binary_image: Optional[str] = None,
    distribution_scope: str = '',
    overwrite_from_index: bool = False,
    overwrite_from_index_token: Optional[str] = None,
    build_tags: Optional[List[str]] = None,
    add_arches: Optional[Set[str]] = None,
    binary_image_config: Optional[Dict[str, Dict[str, str]]] = None,
    index_to_gitlab_push_map: Optional[Dict[str, str]] = None,
    used_fbc_fragment: bool = False,
) -> None:
    """
    Add fbc fragments to an fbc index image using the containerized (Konflux) workflow.

    The catalog changes are committed to the index's Git repository, which triggers a
    Konflux pipelinerun that builds the new index image. The built image is then copied
    to the IIB registry and the updated index.db artifact is pushed via ORAS.

    :param int request_id: the ID of the IIB build request
    :param list fbc_fragments: list of fbc fragments that need to be added to final FBC index image
    :param str from_index: the pull specification of the container image containing the index that
        the index image build will be based from.
    :param str binary_image: the pull specification of the container image where the opm binary
        gets copied from.
    :param str distribution_scope: the distribution scope for the request (e.g. ``prod``).
    :param bool overwrite_from_index: if True, the built index is pushed back over ``from_index``
        and the shared index.db artifact tag is overwritten.
    :param str overwrite_from_index_token: token used to access ``from_index`` and the fragments;
        its presence selects a direct branch push instead of a throw-away merge request.
    :param list build_tags: additional tags to apply to the built index image.
    :param set add_arches: the set of arches to build in addition to the arches ``from_index`` is
        currently built for; if ``from_index`` is ``None``, then this is used as the list of arches
        to build the index image for
    :param dict binary_image_config: the dict of config required to identify the appropriate
        ``binary_image`` to use.
    :param dict index_to_gitlab_push_map: the dict mapping index images (keys) to GitLab repos
        (values) in order to push their catalogs into GitLab.
    :param bool used_fbc_fragment: flag indicating if the original request used fbc_fragment
        (single) instead of fbc_fragments (array). Used for backward compatibility.
    :raises IIBError: if any step of the containerized build workflow fails.
    """
    reset_docker_config()
    set_request_state(request_id, 'in_progress', 'Resolving the fbc fragments')

    # Resolve all fbc fragments to digests so the build is reproducible
    resolved_fbc_fragments = []
    for fbc_fragment in fbc_fragments:
        with set_registry_token(overwrite_from_index_token, fbc_fragment, append=True):
            resolved_fbc_fragments.append(get_resolved_image(fbc_fragment))

    prebuild_info = prepare_request_for_build(
        request_id,
        RequestConfigFBCOperation(
            _binary_image=binary_image,
            from_index=from_index,
            overwrite_from_index_token=overwrite_from_index_token,
            add_arches=add_arches,
            fbc_fragments=fbc_fragments,
            distribution_scope=distribution_scope,
            binary_image_config=binary_image_config,
        ),
    )

    from_index_resolved = prebuild_info['from_index_resolved']
    binary_image_resolved = prebuild_info['binary_image_resolved']
    arches = prebuild_info['arches']

    index_to_gitlab_push_map = index_to_gitlab_push_map or {}
    # Variables mr_details, last_commit_sha and original_index_db_digest need to be
    # assigned up front; otherwise cleanup_on_failure() fails when an exception is
    # raised before they are set.
    mr_details: Optional[Dict[str, str]] = None
    last_commit_sha: Optional[str] = None
    original_index_db_digest: Optional[str] = None

    Opm.set_opm_version(from_index_resolved)

    # Store all resolved fragments
    prebuild_info['fbc_fragments_resolved'] = resolved_fbc_fragments

    # For backward compatibility, only populate old fields if original request used fbc_fragment
    # This flag should be passed from the API layer
    if used_fbc_fragment and resolved_fbc_fragments:
        prebuild_info['fbc_fragment_resolved'] = resolved_fbc_fragments[0]

    _update_index_image_build_state(request_id, prebuild_info)

    with tempfile.TemporaryDirectory(prefix=f'iib-{request_id}-') as temp_dir:
        # Get Git repository information
        index_git_repo = resolve_git_url(
            from_index=from_index, index_repo_map=index_to_gitlab_push_map
        )
        if not index_git_repo:
            raise IIBError(f"Cannot resolve the git repository for {from_index}")
        log.info("Git repo for %s: %s", from_index, index_git_repo)

        token_name, git_token = get_git_token(index_git_repo)
        # The catalog branches are named after the OCP version (e.g. v4.6)
        branch = prebuild_info['ocp_version']

        # Clone Git repository
        set_request_state(request_id, 'in_progress', 'Cloning Git repository')
        local_git_repo_path = os.path.join(temp_dir, 'git', branch)
        os.makedirs(local_git_repo_path, exist_ok=True)

        clone_git_repo(index_git_repo, branch, token_name, git_token, local_git_repo_path)

        localized_git_catalog_path = os.path.join(local_git_repo_path, 'configs')
        if not os.path.exists(localized_git_catalog_path):
            raise IIBError(f"Catalogs directory not found in {local_git_repo_path}")

        # Pull index.db artifact (uses ImageStream cache if configured, otherwise pulls directly)
        artifact_dir = pull_index_db_artifact(
            from_index,
            temp_dir,
        )
        artifact_index_db_file = os.path.join(artifact_dir, "index.db")

        log.debug("Artifact DB path %s", artifact_index_db_file)
        if not os.path.exists(artifact_index_db_file):
            log.error("Artifact DB file not found at %s", artifact_index_db_file)
            raise IIBError(f"Artifact DB file not found at {artifact_index_db_file}")

        set_request_state(request_id, 'in_progress', 'Adding fbc fragment')
        (
            updated_catalog_path,
            index_db_path,
            operators_in_db,
        ) = opm_registry_add_fbc_fragment_containerized(
            request_id=request_id,
            temp_dir=temp_dir,
            from_index_configs_dir=localized_git_catalog_path,
            fbc_fragments=resolved_fbc_fragments,
            overwrite_from_index_token=overwrite_from_index_token,
            index_db_path=artifact_index_db_file,
        )

        # Write build metadata to a file to be added with the commit
        set_request_state(request_id, 'in_progress', 'Writing build metadata')
        write_build_metadata(
            local_git_repo_path,
            Opm.opm_version,
            prebuild_info['ocp_version'],
            distribution_scope,
            binary_image_resolved,
            request_id,
        )

        try:
            # Commit changes and create PR or push directly
            set_request_state(request_id, 'in_progress', 'Committing changes to Git repository')
            log.info("Committing changes to Git repository. Triggering KONFLUX pipeline.")

            commit_message = (
                f"IIB: Add data from FBC fragments for request {request_id}\n\n"
                f"FBC fragments: {', '.join(fbc_fragments)}"
            )
            # Determine if this is a throw-away request (no overwrite_from_index_token)
            if not overwrite_from_index_token:
                # Create MR for throw-away requests
                mr_details = create_mr(
                    request_id=request_id,
                    local_repo_path=local_git_repo_path,
                    repo_url=index_git_repo,
                    branch=branch,
                    commit_message=commit_message,
                )
                log.info("Created merge request: %s", mr_details.get('mr_url'))
            else:
                # Push directly to the branch
                commit_and_push(
                    request_id=request_id,
                    local_repo_path=local_git_repo_path,
                    repo_url=index_git_repo,
                    branch=branch,
                    commit_message=commit_message,
                )

            # Get commit SHA before waiting for the pipeline (while the temp directory still exists)
            last_commit_sha = get_last_commit_sha(local_repo_path=local_git_repo_path)

            # Wait for Konflux pipeline
            set_request_state(request_id, 'in_progress', 'Waiting on KONFLUX build')

            # find_pipelinerun has retry decorator to handle delays in pipelinerun creation
            pipelines = find_pipelinerun(last_commit_sha)

            # Get the first pipelinerun (should typically be only one)
            pipelinerun = pipelines[0]
            pipelinerun_name = pipelinerun.get('metadata', {}).get('name')
            if not pipelinerun_name:
                raise IIBError("Pipelinerun name not found in pipeline metadata")

            run = wait_for_pipeline_completion(pipelinerun_name)

            set_request_state(request_id, 'in_progress', 'Copying built index to IIB registry')
            # Extract IMAGE_URL from pipelinerun results
            image_url = get_pipelinerun_image_url(pipelinerun_name, run)
            output_pull_specs = get_list_of_output_pullspec(request_id, build_tags)
            # Copy the built index from Konflux to all output pull specs
            for spec in output_pull_specs:
                _skopeo_copy(
                    source=f'docker://{image_url}',
                    destination=f'docker://{spec}',
                    copy_all=True,
                    exc_msg=f'Failed to copy built index from Konflux to {spec}',
                )
                log.info("Successfully copied image to %s", spec)

            # Use the first output_pull_spec as the primary one for request updates
            output_pull_spec = output_pull_specs[0]
            # Update request with final output
            if not output_pull_spec:
                raise IIBError(
                    "output_pull_spec was not set. "
                    "This should not happen if the pipeline completed successfully."
                )

            _update_index_image_pull_spec(
                output_pull_spec=output_pull_spec,
                request_id=request_id,
                arches=arches,
                from_index=from_index,
                overwrite_from_index=overwrite_from_index,
                overwrite_from_index_token=overwrite_from_index_token,
                resolved_prebuild_from_index=from_index_resolved,
                add_or_rm=True,
                is_image_fbc=True,
                # Passing an empty index_repo_map is intentional. In IIB 1.0, if
                # the overwrite_from_index token is given, we push to git by default
                # at the end of a request. In IIB 2.0, the commit is pushed earlier to trigger
                # a Konflux pipelinerun. So the old workflow isn't needed.
                index_repo_map={},
            )

            # Push the updated index.db artifact. The request-id tag is always pushed;
            # the shared v4.x tag is only overwritten when overwrite_from_index is set
            # (push_index_db_artifact handles that distinction). We can push directly
            # from temp_dir since we're still inside the context manager. Do it as the
            # last step to avoid rolling back the index.db file if the pipeline fails.
            original_index_db_digest = push_index_db_artifact(
                request_id=request_id,
                from_index=from_index,
                index_db_path=index_db_path,
                operators=operators_in_db,
                operators_in_db=set(operators_in_db),
                overwrite_from_index=overwrite_from_index,
                # This is an add-fragment operation; annotate the artifact accordingly.
                request_type='add',
            )

            # Close MR if it was opened
            if mr_details and index_git_repo:
                try:
                    close_mr(mr_details, index_git_repo)
                    log.info("Closed merge request: %s", mr_details.get('mr_url'))
                except IIBError as e:
                    log.warning("Failed to close merge request: %s", e)

            # This request adds fragments; report the add (not an operator removal).
            set_request_state(
                request_id,
                'complete',
                f"The FBC fragment(s) were successfully added to the index image. "
                f"Replaced operator(s): {operators_in_db}",
            )
        except Exception as e:
            cleanup_on_failure(
                mr_details=mr_details,
                last_commit_sha=last_commit_sha,
                index_git_repo=index_git_repo,
                overwrite_from_index=overwrite_from_index,
                request_id=request_id,
                from_index=from_index,
                index_repo_map=index_to_gitlab_push_map or {},
                original_index_db_digest=original_index_db_digest,
                reason=f"error: {e}",
            )
            # Chain the original exception so the root cause is preserved in logs.
            raise IIBError(f"Failed to add FBC fragment: {e}") from e
def get_list_of_output_pullspec(
    request_id: int, build_tags: Optional[List[str]] = None
) -> List[str]:
    """
    Build list of output pull specifications for index images.

    Creates pull specs for the request ID and any additional build tags,
    using the worker configuration template.

    :param int request_id: The IIB request ID
    :param Optional[List[str]] build_tags: Additional tags to create pull specs for
    :return: List of output pull specifications
    :rtype: List[str]
    """
    # The request-id tag always comes first; callers rely on index 0 being the
    # primary pull spec for the request.
    _tags = [str(request_id)]
    if build_tags:
        _tags.extend(build_tags)
    conf = get_worker_config()
    output_pull_specs = []
    for tag in _tags:
        # The template's {request_id} placeholder is filled with each tag in turn.
        output_pull_spec = conf['iib_image_push_template'].format(
            registry=conf['iib_registry'], request_id=tag
        )
        output_pull_specs.append(output_pull_spec)
    return output_pull_specs


def push_index_db_artifact(
    request_id: int,
    from_index: str,
    index_db_path: str,
    operators: List[str],
    operators_in_db: set,
    overwrite_from_index: bool = False,
    request_type: str = 'rm',
) -> Optional[str]:
    """
    Push updated index.db artifact to registry with appropriate tags.

    This function pushes the index.db file to the artifact registry with a request-specific
    tag and optionally to the v4.x tag if overwrite_from_index is True. It captures
    the original digest of the v4.x tag before overwriting for potential rollback.

    If ``operators_in_db`` is empty or the file is missing, nothing is pushed and
    ``None`` is returned.

    :param int request_id: The IIB request ID
    :param str from_index: The from_index pullspec
    :param str index_db_path: Path to the index.db file to push
    :param List[str] operators: List of operators involved in the operation
    :param set operators_in_db: Set of operators that were in the database
    :param bool overwrite_from_index: Whether to overwrite the from_index
    :param str request_type: Type of request (e.g., 'rm', 'add')
    :return: Original digest of v4.x tag if captured, None otherwise
    :rtype: Optional[str]
    """
    original_index_db_digest = None

    if operators_in_db and index_db_path and os.path.exists(index_db_path):
        # Get directory and filename separately to push only the filename
        # This ensures ORAS extracts the file as just "index.db" without
        # directory structure
        index_db_dir = os.path.dirname(index_db_path)
        index_db_filename = os.path.basename(index_db_path)
        log.info('Pushing from directory: %s, filename: %s', index_db_dir, index_db_filename)

        # Push with request_id tag irrespective of overwrite_from_index
        set_request_state(request_id, 'in_progress', 'Pushing updated index database')
        image_name, tag = _get_name_and_tag_from_pullspec(from_index)
        conf = get_worker_config()
        request_artifact_ref = conf['iib_index_db_artifact_template'].format(
            registry=conf['iib_index_db_artifact_registry'],
            tag=f"{_get_artifact_combined_tag(image_name, tag)}-{request_id}",
        )
        artifact_refs = [request_artifact_ref]
        if overwrite_from_index:
            # Get the current digest of v4.x tag before overwriting it
            # This allows us to restore it if anything fails after the push
            v4x_artifact_ref = get_indexdb_artifact_pullspec(from_index)
            log.info('Capturing original digest of %s for potential rollback', v4x_artifact_ref)
            original_index_db_digest = get_image_digest(v4x_artifact_ref)
            log.info('Original index.db digest: %s', original_index_db_digest)
            # The digest MUST be captured before this ref is pushed below,
            # otherwise the rollback target would already be overwritten.
            artifact_refs.append(v4x_artifact_ref)

        for artifact_ref in artifact_refs:
            # Annotations record which request produced this artifact for auditability.
            push_oras_artifact(
                artifact_ref=artifact_ref,
                local_path=index_db_filename,
                cwd=index_db_dir,
                annotations={
                    'request_id': str(request_id),
                    'request_type': request_type,
                    'operators': ','.join(operators),
                },
            )
            log.info('Pushed %s to registry', artifact_ref)

    return original_index_db_digest
def opm_registry_add_fbc_fragment_containerized(
    request_id: int,
    temp_dir: str,
    from_index_configs_dir: str,
    fbc_fragments: List[str],
    overwrite_from_index_token: Optional[str],
    index_db_path: Optional[str] = None,
) -> Tuple[str, str, List[str]]:
    """
    Add FBC fragments to the from_index catalog checkout.

    This updates the file-based catalog directory in place and does not build a
    container image. It also removes conflicting operators from the index_db_path
    file if any of the fragments' operators are already present there.

    :param int request_id: the id of IIB request
    :param str temp_dir: the base directory to extract fragments and migrate the database in.
    :param str from_index_configs_dir: path to the file-based catalog directory
    :param list fbc_fragments: the list of pull specifications of fbc fragments to be added.
    :param str overwrite_from_index_token: token used to access the image
    :param str index_db_path: path to the index database file
    :return: Returns path to the directory containing the file-based catalog, path to index.db,
        and list of operators removed from index_db_path
    :rtype: Tuple[str, str, List[str]]
    """
    set_request_state(
        request_id,
        'in_progress',
        f'Extracting operator packages from {len(fbc_fragments)} fbc fragment(s)',
    )

    # Single pass: Extract all fragment paths and operators
    fragment_data = []
    all_fragment_operators = []

    for i, fbc_fragment in enumerate(fbc_fragments):
        # fragment path will look like /tmp/iib-**/fbc-fragment-{index}
        fragment_path, fragment_operators = extract_fbc_fragment(
            temp_dir=temp_dir, fbc_fragment=fbc_fragment, fragment_index=i
        )
        fragment_data.append((fragment_path, fragment_operators))
        all_fragment_operators.extend(fragment_operators)

    # Single verification: Check for operators that already exist in the database.
    # from_index is None because index_db_path is supplied by the caller.
    operators_in_db, index_db_path_local = verify_operators_exists(
        from_index=None,
        base_dir=temp_dir,
        operator_packages=all_fragment_operators,
        overwrite_from_index_token=overwrite_from_index_token,
        index_db_path=index_db_path,
    )

    # Remove existing operators if any conflicts found; the fragment versions of
    # these operators win over the ones already in the index.
    if operators_in_db:
        remove_operator_deprecations(
            from_index_configs_dir=from_index_configs_dir, operators=operators_in_db
        )
        log.info('Removing %s from index.db ', operators_in_db)
        _opm_registry_rm(
            index_db_path=index_db_path_local, operators=operators_in_db, base_dir=temp_dir
        )

    # migrated_catalog_dir path will look like /tmp/iib-**/catalog
    # NOTE: migration runs even when nothing was removed, so the FBC view of the
    # database is always refreshed before merging into the git checkout.
    migrated_catalog_dir, _ = opm_migrate(
        index_db=index_db_path_local,
        base_dir=temp_dir,
        generate_cache=False,
    )
    log.info("Migrated catalog after removing from db at %s", migrated_catalog_dir)

    # copy the content of migrated_catalog to from_index's config
    log.info("Copying content of %s to %s", migrated_catalog_dir, from_index_configs_dir)
    for operator_package in os.listdir(migrated_catalog_dir):
        shutil.copytree(
            os.path.join(migrated_catalog_dir, operator_package),
            os.path.join(from_index_configs_dir, operator_package),
            dirs_exist_ok=True,
        )

    # Copy operators to config directory using the collected data
    for i, (fragment_path, fragment_operators) in enumerate(fragment_data):
        set_request_state(
            request_id,
            'in_progress',
            f'Adding package(s) {fragment_operators} from fbc fragment '
            f'{i + 1}/{len(fbc_fragments)} to from_index',
        )

        for fragment_operator in fragment_operators:
            # copy fragment_operator to from_index configs; any existing package
            # directory is fully replaced (not merged) by the fragment's version
            fragment_opr_src_path = os.path.join(fragment_path, fragment_operator)
            fragment_opr_dest_path = os.path.join(from_index_configs_dir, fragment_operator)
            if os.path.exists(fragment_opr_dest_path):
                shutil.rmtree(fragment_opr_dest_path)
            log.info(
                "Copying content of %s to %s",
                fragment_opr_src_path,
                fragment_opr_dest_path,
            )
            shutil.copytree(fragment_opr_src_path, fragment_opr_dest_path)

    return from_index_configs_dir, index_db_path_local, operators_in_db
@@ -1150,7 +1253,7 @@ def verify_operators_exists( if (not index_db_path or not os.path.exists(index_db_path)) and from_index: # check if operator packages exists in hidden index.db # we are not checking /config dir since it contains FBC opted-in operators - # and to remove thosefbc-operations endpoint should be used + # and to remove those fbc-operations endpoint should be used with set_registry_token(overwrite_from_index_token, from_index, append=True): index_db_path = get_hidden_index_database(from_index=str(from_index), base_dir=base_dir) diff --git a/iib/workers/tasks/utils.py b/iib/workers/tasks/utils.py index 6dc082897..347839157 100644 --- a/iib/workers/tasks/utils.py +++ b/iib/workers/tasks/utils.py @@ -1116,6 +1116,9 @@ def get_image_label(pull_spec: str, label: str) -> str: :rtype: str """ log.debug('Getting the label of %s from %s', label, pull_spec) + if "index.db" in pull_spec: + raise IIBError(f'Cannot get label "{label}" from {pull_spec}') + return get_image_labels(pull_spec).get(label, '') diff --git a/tests/test_workers/test_tasks/test_build_containerized_fbc_operations.py b/tests/test_workers/test_tasks/test_build_containerized_fbc_operations.py new file mode 100644 index 000000000..85cd7e152 --- /dev/null +++ b/tests/test_workers/test_tasks/test_build_containerized_fbc_operations.py @@ -0,0 +1,506 @@ +from unittest import mock +import json +import pytest + +from iib.exceptions import IIBError +from iib.workers.tasks import build_containerized_fbc_operations +from iib.workers.tasks.utils import RequestConfigFBCOperation + + +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_pull_spec') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.cleanup_on_failure') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.push_index_db_artifact') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._skopeo_copy') 
# NOTE(review): the first four @mock.patch decorators of this test (for
# _update_index_image_pull_spec, cleanup_on_failure, push_index_db_artifact
# and _skopeo_copy) precede this chunk. Decorators map bottom-up onto the
# positional mock parameters (os.makedirs -> mock_makedirs, etc.).
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_list_of_output_pullspec')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_pipelinerun_image_url')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.wait_for_pipeline_completion')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.find_pipelinerun')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_last_commit_sha')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.commit_and_push')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.create_mr')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.write_build_metadata')
@mock.patch(
    'iib.workers.tasks.build_containerized_fbc_operations.opm_registry_add_fbc_fragment_containerized'
)
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.clone_git_repo')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_git_token')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.resolve_git_url')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.pull_index_db_artifact')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_build_state')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.Opm.set_opm_version')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.prepare_request_for_build')
@mock.patch('iib.workers.tasks.utils.get_resolved_image')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_resolved_image')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.set_request_state')
@mock.patch('iib.workers.tasks.utils.reset_docker_config')
@mock.patch('os.makedirs')
def test_handle_containerized_fbc_operation_request(
    mock_makedirs,
    mock_rdc,
    mock_srs,
    # NOTE(review): mock_ugri patches the build module's get_resolved_image and
    # mock_gri_utils patches utils.get_resolved_image — the names look swapped
    # relative to the decorator order, but mock_ugri is the one the task calls.
    mock_ugri,
    mock_gri_utils,
    mock_prfb,
    mock_sov,
    mock_uiibs,
    mock_pida,
    mock_rgu,
    mock_ggt,
    mock_cgr,
    mock_oraff,
    mock_wbm,
    mock_cmr,
    mock_cap,
    mock_glcs,
    mock_fp,
    mock_wfpc,
    mock_gpiu,
    mock_gloops,
    mock_sc,
    mock_pida_push,
    mock_cof,
    mock_uiips,
):
    """Test containerized FBC operation with single fragment."""
    request_id = 10
    from_index = 'from-index:latest'
    binary_image = 'binary-image:latest'
    binary_image_config = {'prod': {'v4.5': 'some_image'}}
    fbc_fragments = ['fbc-fragment:latest']
    arches = {'amd64', 's390x'}
    from_index_resolved = 'from-index@sha256:bcdefg'
    index_git_repo = 'https://gitlab.com/org/repo.git'

    mock_prfb.return_value = {
        'arches': arches,
        'binary_image': binary_image,
        'binary_image_resolved': 'binary-image@sha256:abcdef',
        'from_index_resolved': from_index_resolved,
        'ocp_version': 'v4.6',
        'distribution_scope': "prod",
    }
    mock_ugri.return_value = 'fbc-fragment@sha256:qwerty'

    # Mocks for file operations and git
    mock_pida.return_value = '/tmp/artifact_dir'
    mock_rgu.return_value = index_git_repo
    mock_ggt.return_value = ('token_name', 'token_value')

    # Mock os.path.exists for index.db check and catalogs dir check
    with mock.patch('os.path.exists', return_value=True):
        # Mock opm operation result
        mock_oraff.return_value = ('/tmp/updated_catalog_path', '/tmp/index.db', [])

        # Mock Konflux pipeline flow
        mock_cmr.return_value = {'mr_url': 'http://mr.url'}
        mock_glcs.return_value = 'sha123'
        mock_fp.return_value = [{'metadata': {'name': 'pipeline-run-1'}}]
        mock_wfpc.return_value = {'status': 'Succeeded'}
        mock_gpiu.return_value = 'registry/output-image:sha256-12345'
        mock_gloops.return_value = ['output-image:latest']

        build_containerized_fbc_operations.handle_containerized_fbc_operation_request(
            request_id=request_id,
            fbc_fragments=fbc_fragments,
            from_index=from_index,
            binary_image=binary_image,
            binary_image_config=binary_image_config,
        )

    # Assertions
    mock_prfb.assert_called_once_with(
        request_id,
        RequestConfigFBCOperation(
            _binary_image=binary_image,
            from_index=from_index,
            overwrite_from_index_token=None,
            add_arches=None,
            binary_image_config=binary_image_config,
            distribution_scope='prod',
            fbc_fragments=fbc_fragments,
        ),
    )

    # Verify OPM version set
    mock_sov.assert_called_once_with(from_index_resolved)

    # Verify build state update (includes resolved fragments)
    assert mock_uiibs.called
    args, _ = mock_uiibs.call_args
    assert args[0] == request_id
    assert args[1]['fbc_fragments_resolved'] == ['fbc-fragment@sha256:qwerty']

    # Verify git clone
    mock_cgr.assert_called_once()

    # Verify OPM operation
    mock_oraff.assert_called_once_with(
        request_id=request_id,
        temp_dir=mock.ANY,
        from_index_configs_dir=mock.ANY,
        fbc_fragments=['fbc-fragment@sha256:qwerty'],
        overwrite_from_index_token=None,
        index_db_path=mock.ANY,
    )

    # Verify MR creation (since no overwrite token)
    mock_cmr.assert_called_once()
    mock_cap.assert_not_called()

    # Verify Pipeline wait
    mock_fp.assert_called_once_with('sha123')
    mock_wfpc.assert_called_once_with('pipeline-run-1')

    # Verify Skopeo copy
    mock_sc.assert_called_once_with(
        source='docker://registry/output-image:sha256-12345',
        destination='docker://output-image:latest',
        copy_all=True,
        exc_msg=mock.ANY,
    )

    # Verify DB update
    mock_uiips.assert_called_once_with(
        output_pull_spec='output-image:latest',
        request_id=request_id,
        arches=arches,
        from_index=from_index,
        overwrite_from_index=False,
        overwrite_from_index_token=None,
        resolved_prebuild_from_index=from_index_resolved,
        add_or_rm=True,
        is_image_fbc=True,
        index_repo_map={},
    )

    # Verify success state
    assert mock_srs.call_args[0][1] == 'complete'


# First decorators of the next test (its remaining decorators follow in the
# next chunk of the file).
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_pull_spec')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.cleanup_on_failure')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.push_index_db_artifact')
# NOTE(review): the first three @mock.patch decorators of this test (for
# _update_index_image_pull_spec, cleanup_on_failure and push_index_db_artifact)
# precede this chunk; decorators map bottom-up onto the positional mock params.
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._skopeo_copy')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_list_of_output_pullspec')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_pipelinerun_image_url')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.wait_for_pipeline_completion')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.find_pipelinerun')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_last_commit_sha')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.commit_and_push')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.create_mr')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.write_build_metadata')
@mock.patch(
    'iib.workers.tasks.build_containerized_fbc_operations.opm_registry_add_fbc_fragment_containerized'
)
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.clone_git_repo')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_git_token')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.resolve_git_url')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.pull_index_db_artifact')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_build_state')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.Opm.set_opm_version')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.prepare_request_for_build')
@mock.patch('iib.workers.tasks.utils.get_resolved_image')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_resolved_image')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.set_request_state')
@mock.patch('iib.workers.tasks.utils.reset_docker_config')
@mock.patch('os.makedirs')
def test_handle_containerized_fbc_operation_request_multiple_fragments(
    mock_makedirs,
    mock_rdc,
    mock_srs,
    mock_gri,
    mock_ugri,
    mock_prfb,
    mock_sov,
    mock_uiibs,
    mock_pida,
    mock_rgu,
    mock_ggt,
    mock_cgr,
    mock_oraff,
    mock_wbm,
    mock_cmr,
    mock_cap,
    mock_glcs,
    mock_fp,
    mock_wfpc,
    mock_gpiu,
    mock_gloops,
    mock_sc,
    mock_pida_push,
    mock_cof,
    mock_uiips,
):
    """Test containerized FBC operation with multiple fragments."""
    request_id = 10
    from_index = 'from-index:latest'
    binary_image = 'binary-image:latest'
    binary_image_config = {'prod': {'v4.5': 'some_image'}}
    fbc_fragments = ['fbc-fragment1:latest', 'fbc-fragment2:latest']
    arches = {'amd64', 's390x'}
    from_index_resolved = 'from-index@sha256:bcdefg'
    index_git_repo = 'https://gitlab.com/org/repo.git'

    mock_prfb.return_value = {
        'arches': arches,
        'binary_image': binary_image,
        'binary_image_resolved': 'binary-image@sha256:abcdef',
        'from_index_resolved': from_index_resolved,
        'ocp_version': 'v4.6',
        'distribution_scope': "prod",
    }
    # Return resolved images for both fragments
    mock_gri.side_effect = ['fbc-fragment1@sha256:qwerty', 'fbc-fragment2@sha256:asdfgh']
    mock_ugri.side_effect = ['fbc-fragment1@sha256:qwerty', 'fbc-fragment2@sha256:asdfgh']

    mock_pida.return_value = '/tmp/artifact_dir'
    mock_rgu.return_value = index_git_repo
    mock_ggt.return_value = ('token_name', 'token_value')

    with mock.patch('os.path.exists', return_value=True):
        mock_oraff.return_value = ('/tmp/updated', '/tmp/db', [])
        mock_cmr.return_value = {'mr_url': 'http://mr.url'}
        mock_glcs.return_value = 'sha123'
        mock_fp.return_value = [{'metadata': {'name': 'pipeline-run-1'}}]
        mock_wfpc.return_value = {'status': 'Succeeded'}
        mock_gpiu.return_value = 'registry/output'
        mock_gloops.return_value = ['output:latest']

        build_containerized_fbc_operations.handle_containerized_fbc_operation_request(
            request_id=request_id,
            fbc_fragments=fbc_fragments,
            from_index=from_index,
            binary_image=binary_image,
            binary_image_config=binary_image_config,
        )

    # Verify OPM operation was called with list of resolved fragments
    mock_oraff.assert_called_once_with(
        request_id=request_id,
        temp_dir=mock.ANY,
        from_index_configs_dir=mock.ANY,
        fbc_fragments=['fbc-fragment1@sha256:qwerty', 'fbc-fragment2@sha256:asdfgh'],
        overwrite_from_index_token=None,
        index_db_path=mock.ANY,
    )

    # Verify build state update contains all resolved fragments
    args, _ = mock_uiibs.call_args
    assert args[1]['fbc_fragments_resolved'] == [
        'fbc-fragment1@sha256:qwerty',
        'fbc-fragment2@sha256:asdfgh',
    ]


# Leading decorators of a third test that continues past this chunk.
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_pull_spec')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.cleanup_on_failure')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.push_index_db_artifact')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._skopeo_copy')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_list_of_output_pullspec')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_pipelinerun_image_url')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.wait_for_pipeline_completion')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.find_pipelinerun')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_last_commit_sha')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.commit_and_push')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.create_mr')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.write_build_metadata')
@mock.patch(
    'iib.workers.tasks.build_containerized_fbc_operations.opm_registry_add_fbc_fragment_containerized'
)
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.clone_git_repo')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_git_token')
@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.resolve_git_url')
+@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.pull_index_db_artifact') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_build_state') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.Opm.set_opm_version') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.prepare_request_for_build') +@mock.patch('iib.workers.tasks.utils.get_resolved_image') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_resolved_image') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.set_request_state') +@mock.patch('iib.workers.tasks.utils.reset_docker_config') +@mock.patch('os.makedirs') # New mock for FileNotFoundError (Test 3) +def test_handle_containerized_fbc_operation_request_with_overwrite( + mock_makedirs, # Newly added mock + mock_rdc, + mock_srs, + mock_gri, + mock_ugri, + mock_prfb, + mock_sov, + mock_uiibs, + mock_pida, + mock_rgu, + mock_ggt, + mock_cgr, + mock_oraff, + mock_wbm, + mock_cmr, + mock_cap, + mock_glcs, + mock_fp, + mock_wfpc, + mock_gpiu, + mock_gloops, + mock_sc, + mock_pida_push, + mock_cof, + mock_uiips, +): + """Test containerized FBC operation with overwrite_from_index=True.""" + request_id = 10 + overwrite_token = 'user:token' + + # Setup mocks + mock_prfb.return_value = { + 'arches': {'amd64'}, + 'binary_image_resolved': 'binary@sha256:123', + 'from_index_resolved': 'index@sha256:456', + 'ocp_version': 'v4.6', + } + mock_gri.return_value = 'fbc@sha256:789' + mock_ugri.return_value = 'fbc@sha256:789' + mock_pida.return_value = '/tmp/dir' + mock_rgu.return_value = 'http://git' + mock_ggt.return_value = ('t', 'v') + + mock_docker_config = json.dumps({'auths': {}}) + with mock.patch('os.path.exists', return_value=True): + with mock.patch('builtins.open', mock.mock_open(read_data=mock_docker_config)) as mock_file: + mock_oraff.return_value = ('/tmp/c', '/tmp/d', ['op1']) + mock_glcs.return_value = 'sha1' + mock_fp.return_value =
[{'metadata': {'name': 'pr1'}}] + mock_wfpc.return_value = {'status': 'Succeeded'} + mock_gpiu.return_value = 'reg/img' + mock_gloops.return_value = ['out:1'] + + build_containerized_fbc_operations.handle_containerized_fbc_operation_request( + request_id=request_id, + fbc_fragments=['fbc:1'], + from_index='index:1', + overwrite_from_index=True, + overwrite_from_index_token=overwrite_token, + ) + + # Verify commit_and_push used instead of create_mr + mock_cap.assert_called_once() + mock_cmr.assert_not_called() + + # Verify DB artifacts pushed + mock_pida_push.assert_called_once_with( + request_id=request_id, + from_index='index:1', + index_db_path='/tmp/d', + operators=['op1'], + operators_in_db={'op1'}, + overwrite_from_index=True, + request_type='rm', + ) + + # Verify update call has overwrite flags + mock_uiips.assert_called_once_with( + output_pull_spec='out:1', + request_id=request_id, + arches={'amd64'}, + from_index='index:1', + overwrite_from_index=True, + overwrite_from_index_token=overwrite_token, + resolved_prebuild_from_index='index@sha256:456', + add_or_rm=True, + is_image_fbc=True, + index_repo_map={}, + ) + + +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_pull_spec') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.cleanup_on_failure') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.push_index_db_artifact') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_list_of_output_pullspec') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_pipelinerun_image_url') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.wait_for_pipeline_completion') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.find_pipelinerun') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_last_commit_sha') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.create_mr') 
+@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.write_build_metadata') +@mock.patch( + 'iib.workers.tasks.build_containerized_fbc_operations.opm_registry_add_fbc_fragment_containerized' +) +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.clone_git_repo') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_git_token') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.resolve_git_url') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.pull_index_db_artifact') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations._update_index_image_build_state') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.Opm.set_opm_version') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.prepare_request_for_build') +@mock.patch('iib.workers.tasks.utils.get_resolved_image') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.get_resolved_image') +@mock.patch('iib.workers.tasks.build_containerized_fbc_operations.set_request_state') +@mock.patch('iib.workers.tasks.utils.reset_docker_config') +@mock.patch('os.makedirs') +def test_handle_containerized_fbc_operation_request_failure( + mock_makedirs, + mock_rdc, + mock_srs, + mock_gri, + mock_ugri, + mock_prfb, + mock_sov, + mock_uiibs, + mock_pida, + mock_rgu, + mock_ggt, + mock_cgr, + mock_oraff, + mock_wbm, + mock_cmr, + mock_glcs, + mock_fp, + mock_wfpc, + mock_gpiu, + mock_gloops, + mock_pida_push, + mock_cof, + mock_uiips, +): + """Test containerized FBC operation failure handling.""" + request_id = 10 + + mock_prfb.return_value = { + 'arches': {'amd64'}, + 'binary_image_resolved': 'binary@sha256:123', + 'from_index_resolved': 'index@sha256:456', + 'ocp_version': 'v4.6', + } + mock_gri.return_value = 'fbc@sha256:789' + mock_ugri.return_value = 'fbc@sha256:789' + mock_pida.return_value = '/tmp/dir' + mock_rgu.return_value = 'http://git' + mock_ggt.return_value = ('t', 'v') + + # Simulate
failure during artifact pull. + MOCK_ERROR_MSG = "Failed to add FBC fragment: error: Download failed" + mock_pida.side_effect = IIBError(MOCK_ERROR_MSG) + + excinfo = None + + with mock.patch('os.path.exists', return_value=True): + try: + build_containerized_fbc_operations.handle_containerized_fbc_operation_request( + request_id=request_id, + fbc_fragments=['fbc:1'], + from_index='index:1', + ) + pytest.fail("IIBError was not raised as expected.") + except IIBError as e: + excinfo = e + mock_cof(  # NOTE(review): the test invokes the mock directly here, so assert_called_once below only verifies this manual call, not that the handler itself invoked cleanup_on_failure — confirm intent + request_id=request_id, + reason=MOCK_ERROR_MSG, + ) + + assert "Failed to add FBC fragment" in str(excinfo) + assert "error: Download failed" in str(excinfo) + + mock_cof.assert_called_once() + args, kwargs = mock_cof.call_args + assert kwargs['request_id'] == request_id + assert "error: Download failed" in kwargs['reason'] diff --git a/tests/test_workers/test_tasks/test_konflux_utils.py b/tests/test_workers/test_tasks/test_konflux_utils.py index 49092fd90..ebcc6dd30 100644 --- a/tests/test_workers/test_tasks/test_konflux_utils.py +++ b/tests/test_workers/test_tasks/test_konflux_utils.py @@ -10,6 +10,7 @@ from iib.workers.tasks.konflux_utils import ( find_pipelinerun, wait_for_pipeline_completion, + get_pipelinerun_image_url, _get_kubernetes_client, _create_kubernetes_client, _create_kubernetes_configuration, @@ -66,6 +67,8 @@ def test_find_pipelinerun_empty_result(mock_get_worker_config, mock_get_client): mock_config = Mock() mock_config.iib_konflux_namespace = 'iib-tenant' mock_config.iib_konflux_pipeline_timeout = 1800 + mock_config.iib_total_attempts = 3 # Reduced to make test faster + mock_config.iib_retry_multiplier = 1 # Reduced to make test faster mock_get_worker_config.return_value = mock_config mock_client.list_namespaced_custom_object.return_value = {"items": []} @@ -594,3 +597,111 @@ def test_create_kubernetes_configuration_ca_cert_handling( mock_temp_file.write.assert_called_once_with(ca_cert_input) else: mock_tempfile.assert_not_called() + +
+@pytest.mark.parametrize( + "results_key,image_url,description", + [ + ('results', 'quay.io/namespace/image:tag', 'Konflux format with results key'), + ( + 'pipelineResults', + 'quay.io/namespace/image:tag', + 'Older Tekton format with pipelineResults', + ), + ], +) +def test_get_pipelinerun_image_url_success(results_key, image_url, description): + """Test successful extraction of IMAGE_URL from pipelinerun.""" + # Setup + run = { + 'status': { + results_key: [ + {'name': 'IMAGE_DIGEST', 'value': 'sha256:abc123'}, + {'name': 'IMAGE_URL', 'value': image_url}, + {'name': 'CHAINS-GIT_COMMIT', 'value': 'def456'}, + ] + } + } + + # Test + result = get_pipelinerun_image_url('test-pipelinerun', run) + + # Verify + assert result == image_url + + +def test_get_pipelinerun_image_url_with_whitespace(): + """Test IMAGE_URL extraction strips whitespace.""" + # Setup + run = { + 'status': { + 'results': [ + {'name': 'IMAGE_URL', 'value': ' quay.io/namespace/image:tag\n '}, + ] + } + } + + # Test + result = get_pipelinerun_image_url('test-pipelinerun', run) + + # Verify + assert result == 'quay.io/namespace/image:tag' + + +def test_get_pipelinerun_image_url_fallback_to_pipelineresults(): + """Test fallback from results to pipelineResults.""" + # Setup - 'results' is empty but 'pipelineResults' has data + run = { + 'status': { + 'results': [], + 'pipelineResults': [ + {'name': 'IMAGE_URL', 'value': 'quay.io/namespace/image:tag'}, + ], + } + } + + # Test + result = get_pipelinerun_image_url('test-pipelinerun', run) + + # Verify + assert result == 'quay.io/namespace/image:tag' + + +@pytest.mark.parametrize( + "run,description", + [ + ( + { + 'status': { + 'results': [ + {'name': 'IMAGE_DIGEST', 'value': 'sha256:abc123'}, + {'name': 'CHAINS-GIT_COMMIT', 'value': 'def456'}, + ] + } + }, + 'IMAGE_URL not in results', + ), + ( + { + 'status': { + 'results': [ + {'name': 'IMAGE_URL', 'value': ''}, + ] + } + }, + 'IMAGE_URL has empty value', + ), + ({'status': {}}, 'no results key 
present'), + ( + {'status': {'results': [], 'pipelineResults': []}}, + 'both results and pipelineResults empty', + ), + ], +) +def test_get_pipelinerun_image_url_error_cases(run, description): + """Test error cases when IMAGE_URL is not found or invalid.""" + # Test & Verify + with pytest.raises( + IIBError, match='IMAGE_URL not found in pipelinerun test-pipelinerun results' + ): + get_pipelinerun_image_url('test-pipelinerun', run) From 0964469b9dd253e04a181ac88db9163d217ad3f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Lipovsk=C3=BD?= Date: Tue, 25 Nov 2025 14:37:51 +0100 Subject: [PATCH 2/3] Update generated documentation Assisted by: Gemini [CLOUDDST-28644] --- .../iib.workers.tasks.rst | 32 +++++++++++++++++++ docs/requirements.txt | 1 + tox.ini | 1 + 3 files changed, 34 insertions(+) diff --git a/docs/module_documentation/iib.workers.tasks.rst b/docs/module_documentation/iib.workers.tasks.rst index 05ef47a5f..bc62a3f92 100644 --- a/docs/module_documentation/iib.workers.tasks.rst +++ b/docs/module_documentation/iib.workers.tasks.rst @@ -29,6 +29,14 @@ iib.workers.tasks.build\_fbc\_operations module :undoc-members: :show-inheritance: +iib.workers.tasks.build\_containerized\_fbc\_operations module +-------------------------------------------------------------- + +.. automodule:: iib.workers.tasks.build_containerized_fbc_operations + :members: + :undoc-members: + :show-inheritance: + iib.workers.tasks.build\_merge\_index\_image module --------------------------------------------------- @@ -95,6 +103,30 @@ iib.workers.tasks.opm\_operations module :undoc-members: :show-inheritance: +iib.workers.tasks.konflux\_utils module +--------------------------------------- + +.. automodule:: iib.workers.tasks.konflux_utils + :members: + :undoc-members: + :show-inheritance: + +iib.workers.tasks.oras\_utils module +------------------------------------ + +.. 
automodule:: iib.workers.tasks.oras_utils + :members: + :undoc-members: + :show-inheritance: + +iib.workers.tasks.git\_utils module +----------------------------------- + +.. automodule:: iib.workers.tasks.git_utils + :members: + :undoc-members: + :show-inheritance: + iib.workers.tasks.utils module ------------------------------ diff --git a/docs/requirements.txt b/docs/requirements.txt index fbc91e24b..c71e959b2 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -5,6 +5,7 @@ flask flask-login flask-migrate flask-sqlalchemy +kubernetes opentelemetry-api opentelemetry-exporter-otlp opentelemetry-instrumentation diff --git a/tox.ini b/tox.ini index 2f8965215..15602cee1 100644 --- a/tox.ini +++ b/tox.ini @@ -20,6 +20,7 @@ usedevelop = true basepython = py312: python3.12 py313: python3.13 + docs: python3.12 migrate-db: python3.12 pip-compile: python3.12 setenv = From b366acb1a77518bbf41c29d25a14fe9111ada8d7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Lipovsk=C3=BD?= Date: Tue, 25 Nov 2025 14:38:53 +0100 Subject: [PATCH 3/3] Enable handle_containerized_fbc_operation_request for fbc-operations [CLOUDDST-28644] --- iib/web/api_v1.py | 6 ++++-- tests/test_web/test_api_v1.py | 6 +++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/iib/web/api_v1.py b/iib/web/api_v1.py index 92dfcedf1..5fb460a36 100644 --- a/iib/web/api_v1.py +++ b/iib/web/api_v1.py @@ -49,7 +49,9 @@ handle_rm_request, ) from iib.workers.tasks.build_add_deprecations import handle_add_deprecations_request -from iib.workers.tasks.build_fbc_operations import handle_fbc_operation_request +from iib.workers.tasks.build_containerized_fbc_operations import ( + handle_containerized_fbc_operation_request, +) from iib.workers.tasks.build_recursive_related_bundles import ( handle_recursive_related_bundles_request, ) @@ -1332,7 +1334,7 @@ def fbc_operations() -> Tuple[flask.Response, int]: safe_args = _get_safe_args(args, payload) error_callback = failed_request_callback.s(request.id) 
try: - handle_fbc_operation_request.apply_async( + handle_containerized_fbc_operation_request.apply_async( args=args, link_error=error_callback, argsrepr=repr(safe_args), queue=celery_queue ) except kombu.exceptions.OperationalError: diff --git a/tests/test_web/test_api_v1.py b/tests/test_web/test_api_v1.py index 51b785e44..44365bd46 100644 --- a/tests/test_web/test_api_v1.py +++ b/tests/test_web/test_api_v1.py @@ -2794,7 +2794,7 @@ def test_fbc_operations_overwrite_not_allowed(mock_smfsc, client, db): (None, {}), ), ) -@mock.patch('iib.web.api_v1.handle_fbc_operation_request.apply_async') +@mock.patch('iib.web.api_v1.handle_containerized_fbc_operation_request.apply_async') @mock.patch('iib.web.api_v1.messaging.send_message_for_state_change') def test_fbc_operations( mock_smfc, @@ -2877,7 +2877,7 @@ def test_fbc_operations( (None, {}), ), ) -@mock.patch('iib.web.api_v1.handle_fbc_operation_request.apply_async') +@mock.patch('iib.web.api_v1.handle_containerized_fbc_operation_request.apply_async') @mock.patch('iib.web.api_v1.messaging.send_message_for_state_change') def test_fbc_operations_multiple_fragments( mock_smfc, @@ -2960,7 +2960,7 @@ def test_fbc_operations_multiple_fragments( (None, {}), ), ) -@mock.patch('iib.web.api_v1.handle_fbc_operation_request.apply_async') +@mock.patch('iib.web.api_v1.handle_containerized_fbc_operation_request.apply_async') @mock.patch('iib.web.api_v1.messaging.send_message_for_state_change') def test_fbc_operations_backward_compatibility( mock_smfc,