From e30aaa27aacbbf5a7904b330556416f19f97a747 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 3 Dec 2025 01:00:03 +0530 Subject: [PATCH 01/48] feat: Add RegDef V2 validation with version detection Detect version field in RegDef files and validate against V2 schema when version is 2.0. Use logger for validation messages and move schema paths to constants. --- scripts/build_env/main.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/scripts/build_env/main.py b/scripts/build_env/main.py index bd4707115..ad70b676f 100644 --- a/scripts/build_env/main.py +++ b/scripts/build_env/main.py @@ -1,4 +1,6 @@ import argparse +import os +import pathlib from envgenehelper import * from envgenehelper.deployer import * @@ -17,6 +19,9 @@ CLOUD_SCHEMA = "schemas/cloud.schema.json" NAMESPACE_SCHEMA = "schemas/namespace.schema.json" ENV_SPECIFIC_RESOURCE_PROFILE_SCHEMA = "schemas/resource-profile.schema.json" +APPDEF_SCHEMA = "schemas/appdef.schema.json" +REGDEF_V1_SCHEMA = "schemas/regdef.schema.json" +REGDEF_V2_SCHEMA = "schemas/regdef-v2.schema.json" def prepare_folders_for_rendering(env_name, cluster_name, source_env_dir, templates_dir, render_dir, @@ -273,15 +278,26 @@ def validate_appregdefs(render_dir, env_name): logger.info(f"No AppDef YAMLs found in {appdef_dir}") for file in appdef_files: logger.info(f"AppDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/appdef.schema.json") + validate_yaml_by_scheme_or_fail(file, APPDEF_SCHEMA) if os.path.exists(regdef_dir): regdef_files = findAllYamlsInDir(regdef_dir) if not regdef_files: logger.info(f"No RegDef YAMLs found in {regdef_dir}") for file in regdef_files: - logger.info(f"RegDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/regdef.schema.json") + logger.info(f"Validating RegDef file: {file}") + + regdef_content = openYaml(file) + version = str(regdef_content.get('version', '1.0')) + + if version != '1.0': + schema_path = REGDEF_V2_SCHEMA + logger.debug(f"Using RegDef V2 schema for {os.path.basename(file)} (version: {version})") + else: + schema_path = REGDEF_V1_SCHEMA + logger.debug(f"Using RegDef V1 schema for {os.path.basename(file)}") + + validate_yaml_by_scheme_or_fail(file, schema_path) def render_environment(env_name, cluster_name, templates_dir, all_instances_dir, output_dir, g_template_version, From f556b4622eff14652a44fc181031749370bec3e3 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 3 Dec 2025 05:25:03 +0530 Subject: [PATCH 02/48] feat: Add V2 cloud registry support for artifact searcher - Add AuthConfig model for V2 authentication configuration - Add version and auth_config fields to Registry model - Add V2 routing in check_artifact_async with fallback to V1 - Add CloudAuthHelper for AWS/GCP cloud registry authentication - Add environment credential loading for V2 cloud registries - Add V2 cloud registry dependencies (boto3, google-auth) - Add comprehensive tests for V2 models and routing Preserves all V1 functionality including: - Nexus detection and URL conversion - Snapshot version resolution - URL-based artifact search --- .../artifact_searcher/artifact.py | 116 +++++++++++++++--- .../artifact_searcher/cloud_auth_helper.py | Bin 0 -> 17958 bytes .../artifact_searcher/utils/models.py | 35 +++++- python/artifact-searcher/pyproject.toml | 7 +- python/artifact-searcher/tests/__init__.py | Bin 0 -> 78 bytes .../tests/test_cloud_auth_helper.py | Bin 0 -> 11300 bytes .../artifact-searcher/tests/test_models_v2.py | Bin 0 -> 11450 bytes 
.../tests/test_version_routing.py | Bin 0 -> 34482 bytes scripts/build_env/handle_sd.py | 37 +++++- 9 files changed, 174 insertions(+), 21 deletions(-) create mode 100644 python/artifact-searcher/artifact_searcher/cloud_auth_helper.py create mode 100644 python/artifact-searcher/tests/__init__.py create mode 100644 python/artifact-searcher/tests/test_cloud_auth_helper.py create mode 100644 python/artifact-searcher/tests/test_models_v2.py create mode 100644 python/artifact-searcher/tests/test_version_routing.py diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index c62ee60ae..c454957b5 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -5,6 +5,7 @@ import tempfile from pathlib import Path from typing import Any, Optional +from functools import partial from urllib.parse import urljoin, urlparse, urlunparse from zipfile import ZipFile import xml.etree.ElementTree as ET @@ -147,12 +148,12 @@ def create_app_artifacts_local_path(app_name, app_version): async def download(session, artifact_info: ArtifactInfo) -> ArtifactInfo: - """ - Downloads an artifact to a local directory: ///filename.extension - Sets full local path of artifact to artifact info - Returns: - ArtifactInfo: Object containing related information about the artifact - """ + """Downloads an artifact to a local directory""" + # Skip download if already downloaded (V2 cloud artifacts) + if artifact_info.local_path: + logger.info(f"Artifact already downloaded (V2): {artifact_info.local_path}") + return artifact_info + url = artifact_info.url app_local_path = create_app_artifacts_local_path(artifact_info.app_name, artifact_info.app_version) artifact_local_path = os.path.join(app_local_path, os.path.basename(url)) @@ -255,19 +256,100 @@ async def _attempt_check( async def check_artifact_async( - app: Application, artifact_extension: FileExtension, version: str + app: Application, artifact_extension: FileExtension, version: str, + env_creds: Optional[dict] = None ) -> Optional[tuple[str, tuple[str, str]]] | None: - """ - Resolves the full artifact URL and the first repository where it was found. - Supports both release and snapshot versions. - - Returns: - Optional[tuple[str, tuple[str, str]]]: A tuple containing: - - str: Full URL to the artifact. - - tuple[str, str]: A pair of (repository name, repository pointer/alias in CMDB). - Returns None if the artifact could not be resolved - """ + """Routes to V2 (cloud-aware) or V1 (URL-based) search based on Registry version""" + registry_version = getattr(app.registry, 'version', "1.0") + + if registry_version == "2.0": + logger.info(f"Detected RegDef V2 for {app.name}, attempting cloud-aware search") + try: + return await _check_artifact_v2_async(app, artifact_extension, version, env_creds) + except Exception as e: + logger.warning(f"V2 artifact search failed for {app.name}: {e}. 
Falling back to V1.") + return await _check_artifact_v1_async(app, artifact_extension, version) + else: + logger.debug(f"Using V1 artifact search for {app.name} (version={registry_version})") + return await _check_artifact_v1_async(app, artifact_extension, version) + + +async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str, + env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]: + """V2 artifact search using Maven Client with cloud authentication""" + if not env_creds: + logger.warning(f"V2 registry but no env_creds provided for {app.name}, falling back to V1") + return await _check_artifact_v1_async(app, artifact_extension, version) + + auth_config_ref = getattr(app.registry.maven_config, 'auth_config', None) + if not auth_config_ref: + logger.warning(f"V2 registry but no maven authConfig reference for {app.name}, falling back to V1") + return await _check_artifact_v1_async(app, artifact_extension, version) + + try: + from artifact_searcher.cloud_auth_helper import CloudAuthHelper + from qubership_pipelines_common_library.v1.maven_client import Artifact as MavenArtifact + + auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") + + if not auth_config or not auth_config.provider: + logger.warning(f"V2 registry but no cloud provider for {app.name}, falling back to V1") + return await _check_artifact_v1_async(app, artifact_extension, version) + + if auth_config.provider not in ["aws", "gcp"]: + logger.warning(f"V2 registry with unsupported provider '{auth_config.provider}' for {app.name}, falling back to V1") + return await _check_artifact_v1_async(app, artifact_extension, version) + + logger.info(f"Creating Maven Client searcher for {app.name} with provider={auth_config.provider}") + loop = asyncio.get_event_loop() + + searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) + + maven_artifact = MavenArtifact(artifact_id=app.artifact_id, version=version, extension=artifact_extension.value) + logger.info(f"Searching for {artifact_extension.value} artifact {app.artifact_id}:{version} using Maven Client") + urls = await loop.run_in_executor(None, partial(searcher.find_artifact_urls, artifact=maven_artifact)) + + if not urls: + logger.warning(f"No {artifact_extension.value} artifacts found for {app.artifact_id}:{version} via Maven Client") + return None + + maven_relative_path = urls[0] + logger.info(f"Found {artifact_extension.value} artifact via Maven Client at: {maven_relative_path}") + + # Download artifact using Maven Client + app_local_path = create_app_artifacts_local_path(app.name, version) + artifact_filename = os.path.basename(maven_relative_path) + local_path = os.path.join(app_local_path, artifact_filename) + os.makedirs(os.path.dirname(local_path), exist_ok=True) + + def download_with_searcher(): + searcher.download_artifact(maven_relative_path, str(local_path)) + return local_path + + downloaded_path = await loop.run_in_executor(None, download_with_searcher) + logger.info(f"Downloaded {artifact_extension.value} artifact to: {downloaded_path}") + + # Construct full URL for tracking + registry_domain = app.registry.maven_config.repository_domain_name + folder = version_to_folder_name(version) + if folder == "releases": + repo_path = app.registry.maven_config.target_release + elif folder == "staging": + repo_path = app.registry.maven_config.target_staging + else: + repo_path = app.registry.maven_config.target_snapshot + + full_url = 
f"{registry_domain.rstrip('/')}/{repo_path.rstrip('/')}/{maven_relative_path}" + return full_url, ("v2_downloaded", local_path) + + except Exception as e: + logger.error(f"Error in V2 search: {e}", exc_info=True) + raise + +async def _check_artifact_v1_async(app: Application, artifact_extension: FileExtension, + version: str) -> Optional[tuple[str, tuple[str, str]]]: + """V1 artifact search using URL-based approach""" result = await _attempt_check(app, version, artifact_extension) if result is not None: return result diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..b657c5088dfa325d745ae0db0be3e72c30e3dffa GIT binary patch literal 17958 zcmeHP?QUd85p9WoNW8-^`QeF(2ZG=SNJd!BDnyV3IZ6;R*2p^^dma3R$GaP6<)`4C zcoSX&IH${Jy8GVhd*^!WMMB8(&diiRrYUESV)|7#rhvlq_8Rd^ka!!G{44X?uQ z!UE5(@$XF-g>C%mn?<<9h_f&cNAdLvpGV;(#+_mQDQ1kq38-=2K0cj>Pw?*vK7A8@ zj(_K?wMH1j@6W;#lsJEZcgE3=!Vd#~&%y<`Ahlb}<#;|l!MiJbGs4q5e18TWPVuQ6 z{UyF3r{wSmtA2zLpWyQxPfIG4{uGkm;I~P|zN1_>aTaHi+q>}V_<4jG{7F3h4OCvn z@k?m(3|!12kJRQlK40Q>iD&aD!$st87Qdkuk0R}9coRMjQ^ry( zal4pLsZa2Pl7595q&WvKl3@|P!{G%akQnxn^>2fVh!`n#lGRkrqCa~F4 z$UK6SSKxRQWjO-%bG&xYHi*4Nx9?126k z(O%|^;vM5*CwgP4htjU1rB~vO zY+_m@=eb>)c$3EG))zfPvvv7A#_IF%*Ql>?MgO9_4yQT2(%)*lG8(%z)Qxf{u-Dhn zV2Lmb#BkeE$;R~ccP$@Cmb`BvlC+>#TIm*%b%V7_{Lu)#dXmd`XVFh-Eut9nnZ-GC z*#wqJwq~B(TKRmb+pUO1!oU(q#fAB9!g65(^Pb zBzrSENG82*W{XlY?dVIk%6ZzvXFZRQEPAD8_o3+=2}VGD<`T=vHJvlJvc^!2#j1$4 zAT8dn_i>Sj#ZB{-+?nU(NK4MPVqBHpb&sdrbbX)Is+-x@w_gow{n68h*qme$X!84< zmp+rk7X+YnZY7P?PLsQSRsNnm?twl>3_3z?5TDZ`=_z^(L%uK2Q35ZnQ6T zLHr}nrip!MGu1VgwJdr{-PHB)_aFQ-`~#9-M-9J$O^MLlD_FEE7E4cPj=qmMB(E*I z=O>@e=hQV>#&DXQbCu&_{(iIicMe^!9?%|{yUr!F@AE*ZJ?~`xC51O}&u0{uQODZ7 z9>i$8iFyc*jyRrti8rom9V63Rn~#H)?agL>z8i+^THjVbi@r=B zRd0?cuQ9f&)lPnu;Ys)bsJ%cZiLzO*X#^x-@8Bq|!(P=no+$TgwI)a0A2Z&}N`D3> zejfgS&$i!x4K8i>!!hsh-gdx5&uRQ2UTR&-B=-D1+`DyyMF}I8D_Xz9_V3&WSq#pi zx0uz8zE!%cWEjJgYP?!?j8DeBH{VBL`M+k+HmtSuIp^S@Sj+bg)k~YN=GoeNW_qk) zc{A%%I?6YO{@z0CeLVo9J_i00jk)(div1|+x++VRnL?mm$|ju*`L)Wd)DkS&hcZF_-*gnvY-5k)J#j9yQx=* z`g-2+DRS8#v1YeKL%k=L=&60fi+dtu>PIDy%g5&Lht<@?y*#H_--`Ww;`MEe=R0_T z@L+=Ixj~*`Bv{5s-C+Cd$G26i&HP2#nrqQV414lgIGr1e&=uT|8EdkNf9|Nb57-57 z`I&$aN}kaEejl^)9rB0pQvT!`mDic%mN)GE6ziGPJa^*B%un&_=X6B`V~M;r`_zmM z%W7Ijm_(}SWQr@4BYlmFYhp9CqKqfbm*AL4#y&%P1z!fyG+B63bcxw%1^NQd1wQWS zJBp6%Z#qqM4nW}$xZC8Bx;+R#>*-L4dRf6y=XrOt^hN6{E1%T;NgP`y(5E`uix<6e zMv7+5_qp-#nA;Yd$NkyY0<7Vo+WW8L~d#&N9cDCnV}vF z{l1?|_!N1J$j;M-cGlu9BUM7gWS!Mf#$Cw5lUU+_o^P7x#7nL-WiqRyeSYfs$fDAp zH3cfOhmyL6slwBaAXU9J8aC2u0`cFSZAu#tp_|)YxTE!%{chp0uV5RZ4Od}yNay|R zm2ZW=;OPVz;t1!{-y=_KhhO3MF#H60gWs{PIKnrys?p|CTK@vG=@XV6{p=@wb3ZkU zl#Hi-jYxOKeOoG?LeN^gOEE#L`|C?9Wy}xBYt&ofCEcMhKTOfBG4)s$FQvV6`kp;) z|B~yj+nzOBZfErnEoeEC?<`BMYs)r2Lv7in@Z7dkHp_M0#+2u`rDxyLGrep?M1B3x zr&3>`%t993^1zy87N>T$!Tr249PQ7$SX_fUPCt@LueGknc0?0a^USv84j`9xSYERI z8qte;=~TtHZ!5J=`)I@chlsUZhTTkJ4uAY<#TNQ~)(g38?U%laI!L$IwklPf<)poc z!zLSd7eae=VeHPLRo3gTNsC)PwEJiBb7`P29}Zc`&Jn)AdaR22Vn3Z#_;>i;$IU#R zgziDp>0BfCuu*1PyOox1FqT^FyB9jG#*VC9-c&Ih@4g4e-Ew{gv;GY>X6Ewll^@rZ zp@l0xr|Vm}b|U3rSSeYGg}j1Ho}F-jBg>uzd$o_3u3h(SF7GbTv{?-|Z>+SdP3~ma z`=d?hh3&hbsv&Z8)ic#v>vl+GbL*h6gKt&VI97FIR6W)?t=&b`y6bfoYxmNcTCgna z`(fKs_o!b+=x66QYN**-pOK^W|9IT&9+SyyO_>vFx?_sAo)NAT8 z^`UZ!pnnQUcM-OgQIk7<|DV)EB|ouVqR-)-*ZSwxjADz;+fI7zlU$$PI+Ygt zE!Hc^aoE?e?A)J!)2XjCYdc2%P0U%uF58Q=SJtVC=H<0nt>(&OF!7joQD~EXge~{D zWk;=^JL+-fBPip7Suo|g^)mkOQ@~mT`h0m>EPDma(CO}EZW%_JcK3bJX1J1a{!Vdc 
diff --git a/python/artifact-searcher/artifact_searcher/utils/models.py b/python/artifact-searcher/artifact_searcher/utils/models.py
index d2d918d9f..9f26ad6ca 100644
--- a/python/artifact-searcher/artifact_searcher/utils/models.py
+++ b/python/artifact-searcher/artifact_searcher/utils/models.py
@@ -25,6 +25,7 @@ class MavenConfig(BaseSchema):
     repository_domain_name: str = Field(json_schema_extra={"error_message": "Application registry does not define URL"})
     snapshot_group: Optional[str] = ""
     release_group: Optional[str] = ""
+    auth_config: Optional[str] = None  # V2: Reference to authConfig key in Registry.auth_config

     is_nexus: bool = False

@@ -51,6 +52,34 @@ def detect_nexus(self):
             self.is_nexus = False
         return self

+
+class AuthConfig(BaseSchema):
+    """RegDef V2 authentication configuration"""
+    credentials_id: str
+    auth_type: Optional[str] = None  # shortLived, longLived
+    provider: Optional[str] = None  # aws, gcp, azure
+    auth_method: Optional[str] = None  # secret, assume_role, service_account, federation, user_pass
+
+    # AWS-specific fields
+    aws_region: Optional[str] = None
+    aws_domain: Optional[str] = None
+    aws_role_arn: Optional[str] = None
+    aws_role_session_prefix: Optional[str] = "nc-devops-m2m-session"
+
+    # GCP-specific fields
+    gcp_reg_project: Optional[str] = None
+    gcp_reg_pool_id: Optional[str] = None
+    gcp_reg_provider_id: Optional[str] = None
+    gcp_reg_sa_email: Optional[str] = None
+
+    # Azure-specific fields (future support)
+    azure_tenant_id: Optional[str] = None
+    azure_acr_resource: Optional[str] = None
+    azure_acr_name: Optional[str] = None
+    azure_artifacts_resource: Optional[str] = None
+
+
 class DockerConfig(BaseSchema):
     snapshot_uri: Optional[str] = ""
     staging_uri: Optional[str] = ""
@@ -103,7 +132,7 @@ class ArtifactInfo(BaseSchema):


 class Registry(BaseSchema):
-    credentials_id: Optional[str] = ""
+    credentials_id: Optional[str] = ""  # V1 backward compatibility
     name: str
     maven_config: MavenConfig
     docker_config: DockerConfig
@@ -112,6 +141,10 @@ class Registry(BaseSchema):
     npm_config: Optional[NpmConfig] = None
     helm_config: Optional[HelmConfig] = None
     helm_app_config: Optional[HelmAppConfig] = None
+
+    # V2 fields
+    version: Optional[str] = "1.0"  # Default "1.0" for backward compatibility
+    auth_config: Optional[dict[str, AuthConfig]] = None  # V2: Dictionary of named auth configurations


 class Application(BaseSchema):
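With these model additions a V2 registry can be expressed directly. A minimal construction sketch -- field values are illustrative, but the shape mirrors the unit test added later in patch 03:

    # Minimal sketch of a RegDef V2 registry built from the new models.
    # Values are illustrative; the structure follows the patch-03 test
    # test_v2_registry_routes_to_cloud_auth.
    from artifact_searcher.utils import models

    auth = models.AuthConfig(
        credentials_id="aws-creds",
        provider="aws",
        auth_method="secret",
        aws_domain="my-domain",
        aws_region="us-east-1",
    )
    registry = models.Registry(
        name="aws-registry",
        version="2.0",                    # triggers the V2 code path
        auth_config={"aws-maven": auth},  # named auth configurations
        maven_config=models.MavenConfig(
            target_snapshot="snapshots",
            target_staging="staging",
            target_release="releases",
            repository_domain_name="https://example.codeartifact.us-east-1.amazonaws.com/maven/repo/",
            auth_config="aws-maven",      # reference into Registry.auth_config
        ),
        docker_config=models.DockerConfig(),
    )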
diff --git a/python/artifact-searcher/pyproject.toml b/python/artifact-searcher/pyproject.toml
index 738f19a0d..d583aadb4 100644
--- a/python/artifact-searcher/pyproject.toml
+++ b/python/artifact-searcher/pyproject.toml
@@ -17,7 +17,12 @@ dependencies = [
     "asyncio~=3.4.3",
     "aioresponses~=0.7.8",
     "pytest-asyncio~=1.0.0",
-    "pytest-aiohttp~=1.1.0"
+    "pytest-aiohttp~=1.1.0",
+    # V2 cloud registry support dependencies
+    "boto3>=1.39.4,<2.0.0",
+    "google-auth~=2.34.0",
+    # Maven Client library for V2 cloud registry support
+    "qubership-pipelines-common-library>=0.2.5,<1.0.0"
 ]

 [project.optional-dependencies]
diff --git a/python/artifact-searcher/tests/__init__.py b/python/artifact-searcher/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f670b2f318c8e350e244719601394a5e4f71fa71
GIT binary patch
[base85 binary patch payload omitted -- literal 78 / literal 0]
diff --git a/python/artifact-searcher/tests/test_cloud_auth_helper.py b/python/artifact-searcher/tests/test_cloud_auth_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..fdd1eb55978259ee27b4512b423768a5c9695f96
GIT binary patch
[base85 binary patch payload omitted -- literal 11300 / literal 0]
diff --git a/python/artifact-searcher/tests/test_models_v2.py b/python/artifact-searcher/tests/test_models_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6e2cbd96e56ea90ff6424deafc1a287a306f29d
GIT binary patch
[base85 binary patch payload omitted -- literal 11450 / literal 0]
diff --git a/python/artifact-searcher/tests/test_version_routing.py b/python/artifact-searcher/tests/test_version_routing.py
new file mode 100644
index 0000000000000000000000000000000000000000..21f27b0a27e101235c83aa6b4ab159c4e7efad35
GIT binary patch
[base85 binary patch payload omitted -- literal 34482 / literal 0]
diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py
index 381d5b1d5..fc1d29ae7 100644
--- a/scripts/build_env/handle_sd.py
+++ b/scripts/build_env/handle_sd.py
@@ -300,13 +300,46 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
     extract_sds_from_json(env, base_sd_path, sd_data_json, effective_merge_mode)


+def _get_environment_credentials() -> dict:
+    """Get credentials from environment for V2 cloud registry support."""
+    env_creds = {}
+    aws_access_key = os.getenv("AWS_ACCESS_KEY_ID")
+    aws_secret_key = os.getenv("AWS_SECRET_ACCESS_KEY")
+    if aws_access_key and aws_secret_key:
+        env_creds["aws-keys"] = {
+            "username": aws_access_key,
+            "password": aws_secret_key
+        }
+        logger.debug("Loaded AWS credentials from environment")
+
+    gcp_sa_json_path = os.getenv("GCP_SA_JSON_PATH")
+    if gcp_sa_json_path and path.exists(gcp_sa_json_path):
+        try:
+            with open(gcp_sa_json_path) as f:
+                env_creds["gcp-sa"] = {"secret": f.read()}
+            logger.debug("Loaded GCP service account from file")
+        except Exception as e:
+            logger.warning(f"Failed to load GCP credentials from {gcp_sa_json_path}: {e}")
+    gcp_sa_json = os.getenv("GCP_SA_JSON")
+    if gcp_sa_json:
+        env_creds["gcp-sa"] = {"secret": gcp_sa_json}
+        logger.debug("Loaded GCP service account from environment variable")
+
+    if env_creds:
+        logger.info(f"Loaded {len(env_creds)} credential set(s) for V2 cloud registry support")
+    else:
+        logger.debug("No V2 cloud credentials found in environment (V1 will still work)")
+
+    return env_creds
+
+
 def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine) -> dict[str, object]:
     if 'SNAPSHOT' in version:
         raise ValueError("SNAPSHOT is not supported version of Solution Descriptor artifacts")

-    # TODO: check if job would fail without plugins
     app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins)
-    artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version))
+    env_creds = _get_environment_credentials()
+    artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds))
     if not artifact_info:
         raise ValueError(
             f'Solution descriptor content was not received for {app_name}:{version}')
From 186f16646a017a0e15edd186ed81878b54e2e970 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Sun, 14 Dec 2025 12:06:25 +0530
Subject: [PATCH 03/48] fix: V2 artifact searcher cleanup and improvements

- Simplified retry logic (2 retries, 5s fixed delay)
- Reduced timeouts to reasonable values (60s search, 120s download)
- Removed debug/diagnostic code
- Removed unnecessary test files
- Clean up code style
---
 .../artifact_searcher/artifact.py             | 241 +++++++++++-------
 .../artifact_searcher/cloud_auth_helper.py    | Bin 17958 -> 9102 bytes
 .../artifact_searcher/test_artifact.py        |  41 +++
 .../artifact_searcher/utils/models.py         |  29 +--
 python/artifact-searcher/tests/__init__.py    | Bin 78 -> 0 bytes
 .../tests/test_cloud_auth_helper.py           | Bin 11300 -> 0 bytes
 .../artifact-searcher/tests/test_models_v2.py | Bin 11450 -> 0 bytes
 .../tests/test_version_routing.py             | Bin 34482 -> 0 bytes
 8 files changed, 197 insertions(+), 114 deletions(-)
 delete mode 100644 python/artifact-searcher/tests/__init__.py
 delete mode 100644 python/artifact-searcher/tests/test_cloud_auth_helper.py
 delete mode 100644 python/artifact-searcher/tests/test_models_v2.py
 delete mode 100644 python/artifact-searcher/tests/test_version_routing.py

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index c454957b5..222fd6d8c 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -3,21 +3,27 @@
 import re
 import shutil
 import tempfile
+import xml.etree.ElementTree as ET
+from functools import partial
 from pathlib import Path
 from typing import Any, Optional
-from functools import partial
 from urllib.parse import urljoin, urlparse, urlunparse
 from zipfile import ZipFile
-import xml.etree.ElementTree as ET

 import aiohttp
 import requests
 from loguru import logger
 from requests.auth import HTTPBasicAuth
-from artifact_searcher.utils.models import Registry, Application, FileExtension, Credentials, ArtifactInfo
+
 from artifact_searcher.utils.constants import DEFAULT_REQUEST_TIMEOUT
+from artifact_searcher.utils.models import Application, ArtifactInfo, Credentials, FileExtension, Registry
+
+WORKSPACE = os.getenv("WORKSPACE", Path(tempfile.gettempdir()) / "zips")

-WORKSPACE = limit = os.getenv("WORKSPACE", Path(tempfile.gettempdir()) / "zips")
+
+# V2 timeouts for cloud registries
+V2_SEARCH_TIMEOUT = 60  # Timeout for find_artifact_urls
+V2_DOWNLOAD_TIMEOUT = 120  # Timeout for download_artifact
+V2_HTTP_TIMEOUT = (30, 60)  # (connect, read) for HTTP requests


 def convert_nexus_repo_url_to_index_view(url: str) -> str:
@@ -28,7 +34,6 @@ def convert_nexus_repo_url_to_index_view(url: str) -> str:
     if not parts or parts[-1] != "repository":
         return url

-    # Build new path
     new_parts = parts[:-1] + ["service", "rest", "repository", "browse"]
     new_path = "/".join(new_parts) + "/"

@@ -82,30 +87,23 @@ async def resolve_snapshot_version_async(
             node_extension = node.findtext("extension", default="")
             value = node.findtext("value")

-            if 
node_classifier == classifier and node_extension == extension: + if node_classifier == classifier and node_extension == extension.value: stop_event.set() logger.info(f"Resolved snapshot version {version} to {value}") return value logger.warning(f"No matching snapshotVersion found for {app.artifact_id} in {metadata_url}") + return None except Exception as e: logger.warning(f"Error resolving snapshot version from {metadata_url}: {e}") + return None -def version_to_folder_name(version: str): - """ - Normalizes version string for folder naming. - - If version is timestamped snapshot (e.g. '1.0.0-20240702.123456-1'), it replaces the timestamp suffix with - '-SNAPSHOT'. Otherwise, returns the version unchanged - """ +def version_to_folder_name(version: str) -> str: + """Normalize timestamped snapshot version to -SNAPSHOT folder name.""" snapshot_pattern = re.compile(r"-\d{8}\.\d{6}-\d+$") - if snapshot_pattern.search(version): - folder = snapshot_pattern.sub("-SNAPSHOT", version) - else: - folder = version - return folder + return snapshot_pattern.sub("-SNAPSHOT", version) if snapshot_pattern.search(version) else version def download_json_content(url: str) -> dict[str, Any]: @@ -148,12 +146,10 @@ def create_app_artifacts_local_path(app_name, app_version): async def download(session, artifact_info: ArtifactInfo) -> ArtifactInfo: - """Downloads an artifact to a local directory""" - # Skip download if already downloaded (V2 cloud artifacts) if artifact_info.local_path: - logger.info(f"Artifact already downloaded (V2): {artifact_info.local_path}") + logger.info(f"Artifact already downloaded: {artifact_info.local_path}") return artifact_info - + url = artifact_info.url app_local_path = create_app_artifacts_local_path(artifact_info.app_name, artifact_info.app_version) artifact_local_path = os.path.join(app_local_path, os.path.basename(url)) @@ -194,7 +190,6 @@ async def check_artifact_by_full_url_async( def get_repo_value_pointer_dict(registry: Registry): - """Permanent set of repositories for searching of artifacts""" maven = registry.maven_config repos = { maven.target_snapshot: "targetSnapshot", @@ -213,7 +208,6 @@ def get_repo_pointer(repo_value: str, registry: Registry): async def _attempt_check( app: Application, version: str, artifact_extension: FileExtension, registry_url: str | None = None ) -> Optional[tuple[str, tuple[str, str]]]: - """Helper function to attempt artifact check with a given registry URL""" folder = version_to_folder_name(version) check_artifact_stop_event = asyncio.Event() resolve_snapshot_stop_event = asyncio.Event() @@ -224,11 +218,11 @@ async def _attempt_check( async with aiohttp.ClientSession() as session: resolved_version = version - resolve_snapshot_coros = [ - (resolve_snapshot_version_async(session, app, version, repo[0], resolve_snapshot_stop_event, extension=artifact_extension)) - for repo in repos_dict.items() - ] if version.endswith("-SNAPSHOT"): + resolve_snapshot_coros = [ + (resolve_snapshot_version_async(session, app, version, repo[0], resolve_snapshot_stop_event, extension=artifact_extension)) + for repo in repos_dict.items() + ] async with asyncio.TaskGroup() as resolve_snapshot_tg: resolve_snapshot_tasks = [resolve_snapshot_tg.create_task(coro) for coro in resolve_snapshot_coros] for task in resolve_snapshot_tasks: @@ -258,10 +252,9 @@ async def _attempt_check( async def check_artifact_async( app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict] = None -) -> Optional[tuple[str, tuple[str, str]]] | None: - """Routes 
to V2 (cloud-aware) or V1 (URL-based) search based on Registry version""" +) -> Optional[tuple[str, tuple[str, str]]]: registry_version = getattr(app.registry, 'version', "1.0") - + if registry_version == "2.0": logger.info(f"Detected RegDef V2 for {app.name}, attempting cloud-aware search") try: @@ -276,80 +269,141 @@ async def check_artifact_async( async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]: - """V2 artifact search using Maven Client with cloud authentication""" - if not env_creds: - logger.warning(f"V2 registry but no env_creds provided for {app.name}, falling back to V1") - return await _check_artifact_v1_async(app, artifact_extension, version) - - auth_config_ref = getattr(app.registry.maven_config, 'auth_config', None) - if not auth_config_ref: - logger.warning(f"V2 registry but no maven authConfig reference for {app.name}, falling back to V1") + if not env_creds or not getattr(app.registry.maven_config, 'auth_config', None): return await _check_artifact_v1_async(app, artifact_extension, version) - + try: from artifact_searcher.cloud_auth_helper import CloudAuthHelper from qubership_pipelines_common_library.v1.maven_client import Artifact as MavenArtifact - - auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") - - if not auth_config or not auth_config.provider: - logger.warning(f"V2 registry but no cloud provider for {app.name}, falling back to V1") + except ImportError: + return await _check_artifact_v1_async(app, artifact_extension, version) + + auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") + if not auth_config or auth_config.provider not in ["aws", "gcp", "artifactory", "nexus"]: + return await _check_artifact_v1_async(app, artifact_extension, version) + + logger.info(f"V2 search for {app.name} with provider={auth_config.provider}") + loop = asyncio.get_running_loop() + + try: + searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) + except Exception as e: + logger.warning(f"Failed to create V2 searcher for {app.name}: {e}") + return await _check_artifact_v1_async(app, artifact_extension, version) + + artifact_string = f"{app.group_id}:{app.artifact_id}:{version}" + maven_artifact = MavenArtifact.from_string(artifact_string) + maven_artifact.extension = artifact_extension.value + + max_retries = 2 + last_error = None + local_path = None + maven_url = None + + for attempt in range(max_retries): + try: + if attempt > 0: + logger.info(f"Retry {attempt} for {app.name} after 5s delay...") + await asyncio.sleep(5) + searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) + + # Wrap find_artifact_urls with timeout to prevent indefinite hangs + urls = await asyncio.wait_for( + loop.run_in_executor(None, partial(searcher.find_artifact_urls, artifact=maven_artifact)), + timeout=V2_SEARCH_TIMEOUT + ) + if not urls: + logger.warning(f"No artifacts found for {app.artifact_id}:{version}") + return None + + maven_url = urls[0] + logger.info(f"Found V2 artifact: {maven_url}") + + local_path = os.path.join(create_app_artifacts_local_path(app.name, version), os.path.basename(maven_url)) + os.makedirs(os.path.dirname(local_path), exist_ok=True) + + download_success = await _v2_download_with_fallback( + searcher, maven_url, local_path, auth_config, app.registry, env_creds + ) + + if download_success: + logger.info(f"V2 
artifact downloaded: {local_path}") + break + raise TimeoutError(f"V2 download failed for {maven_url}") + + except asyncio.TimeoutError: + last_error = TimeoutError(f"V2 search timed out after {V2_SEARCH_TIMEOUT}s") + logger.warning(f"V2 search timed out for {app.name} (attempt {attempt + 1}/{max_retries})") + if attempt < max_retries - 1: + continue return await _check_artifact_v1_async(app, artifact_extension, version) - - if auth_config.provider not in ["aws", "gcp"]: - logger.warning(f"V2 registry with unsupported provider '{auth_config.provider}' for {app.name}, falling back to V1") + + except Exception as e: + last_error = e + error_str = str(e).lower() + if attempt < max_retries - 1 and any(x in error_str for x in ["401", "unauthorized", "forbidden", "expired", "timeout"]): + logger.warning(f"V2 error for {app.name}: {e}, retrying...") + continue + logger.warning(f"V2 failed after {max_retries} attempts for {app.name}: {e}") return await _check_artifact_v1_async(app, artifact_extension, version) - - logger.info(f"Creating Maven Client searcher for {app.name} with provider={auth_config.provider}") - loop = asyncio.get_event_loop() - - searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) - - maven_artifact = MavenArtifact(artifact_id=app.artifact_id, version=version, extension=artifact_extension.value) - logger.info(f"Searching for {artifact_extension.value} artifact {app.artifact_id}:{version} using Maven Client") - urls = await loop.run_in_executor(None, partial(searcher.find_artifact_urls, artifact=maven_artifact)) - - if not urls: - logger.warning(f"No {artifact_extension.value} artifacts found for {app.artifact_id}:{version} via Maven Client") - return None - - maven_relative_path = urls[0] - logger.info(f"Found {artifact_extension.value} artifact via Maven Client at: {maven_relative_path}") - - # Download artifact using Maven Client - app_local_path = create_app_artifacts_local_path(app.name, version) - artifact_filename = os.path.basename(maven_relative_path) - local_path = os.path.join(app_local_path, artifact_filename) - os.makedirs(os.path.dirname(local_path), exist_ok=True) - - def download_with_searcher(): - searcher.download_artifact(maven_relative_path, str(local_path)) - return local_path - - downloaded_path = await loop.run_in_executor(None, download_with_searcher) - logger.info(f"Downloaded {artifact_extension.value} artifact to: {downloaded_path}") - - # Construct full URL for tracking + else: + logger.warning(f"V2 failed after {max_retries} attempts: {last_error}") + return await _check_artifact_v1_async(app, artifact_extension, version) + + if auth_config.provider == "aws": registry_domain = app.registry.maven_config.repository_domain_name - folder = version_to_folder_name(version) - if folder == "releases": - repo_path = app.registry.maven_config.target_release - elif folder == "staging": - repo_path = app.registry.maven_config.target_staging - else: - repo_path = app.registry.maven_config.target_snapshot - - full_url = f"{registry_domain.rstrip('/')}/{repo_path.rstrip('/')}/{maven_relative_path}" - return full_url, ("v2_downloaded", local_path) - + folder_name = version_to_folder_name(version) + repo_path = app.registry.maven_config.target_snapshot if folder_name.endswith("-SNAPSHOT") else app.registry.maven_config.target_release + full_url = f"{registry_domain.rstrip('/')}/{repo_path.rstrip('/')}/{maven_url}" + else: + full_url = maven_url + + return full_url, ("v2_downloaded", local_path) + + +async def 
_v2_download_with_fallback(searcher, url: str, local_path: str, auth_config,
+                                     registry: Registry, env_creds: dict) -> bool:
+    loop = asyncio.get_running_loop()
+
+    try:
+        await asyncio.wait_for(
+            loop.run_in_executor(None, lambda: searcher.download_artifact(url, str(local_path))),
+            timeout=V2_DOWNLOAD_TIMEOUT
+        )
+        return True
+    except asyncio.TimeoutError:
+        logger.warning(f"Searcher download timed out after {V2_DOWNLOAD_TIMEOUT}s")
+    except Exception as e:
+        logger.warning(f"Searcher download failed: {e}")
+
+    if auth_config.provider not in ["gcp", "artifactory", "nexus"]:
+        return False
+
+    try:
+        from artifact_searcher.cloud_auth_helper import CloudAuthHelper
+        headers = {}
+        if auth_config.provider == "gcp":
+            sa_json = CloudAuthHelper.get_gcp_credentials_from_registry(registry, env_creds)
+            if sa_json:
+                token = CloudAuthHelper.get_gcp_access_token(sa_json)
+                if token:
+                    headers["Authorization"] = f"Bearer {token}"
+
+        response = requests.get(url, headers=headers, timeout=V2_HTTP_TIMEOUT, stream=True)
+        response.raise_for_status()
+        with open(local_path, 'wb') as f:
+            for chunk in response.iter_content(chunk_size=8192):
+                if chunk:
+                    f.write(chunk)
+        logger.info(f"Direct HTTP download successful: {local_path}")
+        return True
     except Exception as e:
-        logger.error(f"Error in V2 search: {e}", exc_info=True)
-        raise
+        logger.warning(f"Direct HTTP download failed: {e}")
+        return False


 async def _check_artifact_v1_async(app: Application, artifact_extension: FileExtension,
                                    version: str) -> Optional[tuple[str, tuple[str, str]]]:
-    """V1 artifact search using URL-based approach"""
     result = await _attempt_check(app, version, artifact_extension)
     if result is not None:
         return result
@@ -368,6 +422,7 @@ async def _check_artifact_v1_async(app: Application, artifact_extension: FileExt
     logger.debug("Domain is same after editing, skipping retry")

     logger.warning("Artifact not found")
+    return None


 def unzip_file(artifact_id: str, app_name: str, app_version: str, zip_url: str):
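The V2 path above repeatedly wraps blocking Maven Client calls in run_in_executor and bounds them with asyncio.wait_for. A self-contained sketch of that pattern, with illustrative names (note that wait_for abandons, but cannot kill, the executor thread on timeout):

    # Sketch of the bounded-blocking-call pattern used in the V2 path: run a
    # synchronous client call on the default executor and cap it with
    # asyncio.wait_for so a hung network call cannot stall the event loop.
    import asyncio
    import time
    from typing import Optional

    SEARCH_TIMEOUT = 60  # mirrors V2_SEARCH_TIMEOUT above


    def blocking_search() -> list[str]:
        time.sleep(1)  # stands in for a slow Maven Client call
        return ["com/example/app/1.0.0/app-1.0.0.json"]


    async def bounded_search() -> Optional[list[str]]:
        loop = asyncio.get_running_loop()
        try:
            return await asyncio.wait_for(
                loop.run_in_executor(None, blocking_search),
                timeout=SEARCH_TIMEOUT,
            )
        except asyncio.TimeoutError:
            # Caller falls back to the V1 search; the worker thread is
            # abandoned, not killed.
            return None


    print(asyncio.run(bounded_search()))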
z=?pS>p!b=F9s2~O3VX;Qi5F4jSp!zoISxa#5NG7Pn8@{0cEk>RIG{hp1iYYf%T`;qN}99lrm4hyp9EZ^Hapsre1z5Kb(r#-{U6|4KcjR!G8*v2*Q=f{ zkP1)B$W1O&l^F_I7zX zMXbl3eka0Fb-rXF*+T;B=Cm-W~o{!C4{Nisl6XfU)e3GTy35=F}b9ruPFC zyHlliUTUBfQQmUeDXAOXT*Hhe%SLIriL*jxXv>dr7wjJdqIDxWWJb9!-iX8dKo27V zIMSB+!-G>$Y_rL5$)~-)4@fN$d@e#@n;6c$=&$NwK{FENcEon4v8Ri+9xzo~zk?H? z=r`cR1?9JLh$|KgoTtrmD_a7=)_t^VkaU5RDV)H1@dGMgr?K(rP`9- zsrco@Dpey4gafCtmZgphXHPkuNy*WrZ|^2&qk$~cUAi49zaLaww>3?jA6MB*JBQu? zqm%qW8KW!?@0F|rGY#X&;cbSQAnm2XJTf?*cqUh!e1y5op)~n<*BoB@;81;yyVA6A zL2soN!AXaX-F>1;2&v{n^}YYz=ohcGJuLq^AU^3LZcO?Aa}iHXRCnu+mPVt!mb2H< zK608>Kz)rQsEOzanJOUETu{ByBNdx;9VZ%aTPiJKo=EpUPx-FlqVwGzfkzH(Z7Sl3 z1~KqC4M*LwcdZUq&@~QL(7!NkHx-hX+SVW0=*Ql?MnAJBD%kCI^4~FLSiZIgjV;Zq zHjW=UdW!${_(23r=#Y&SrUiF?3?WOY8Y{B(Ob@vfHc;DW+dcSBZWwnxp~tts;dhA| zMrahWHaG8x9!|oPZSs@ZQCDTXXsvJY=unFUN|0uPrlPc(Sej>4X9rVXMJ6`8>&-K@ z-jaWA81>7P-&_^yILqb#QVF<2iKHLMv3JyzpD~8&Bb=g9R~(reWSagv-47bxpLoG* z8t$&pie1Y|)Q1HXIw}=2NnMRm^bz!iZd%ztK7M%LZ+EE;LqOsIP zRzDf}CSA3dxSE%7N=<=x*DE9MqaZu5W=1!RdO$uz!!v6aKWGftTOKZLbP*w4lw%^{ z^CT~bJc1;W76*(ZWyQte`PdBiZf|cxbyXc#nV?b6K1H7Wgsy4#syuFRd%PKU8HdPq z#T@-|I7UM#4LQ;G+PK~Fc+qa|?vNI?i}XGHvAnLQ30f=|_wA=sfw_%)g_}0C$xVrC zP>m&-aV0txe$YB|w=^WCH7DwxpB@YsP6q4_$C1=KMO_*E&6jVO1Z{T*GrcAWv}HZK z_r}!U3}Kuj>;OjIWFtoOOSI>B9%-78-)|8OvyLU)TgSl} e2H90tIG`vgt7M_;s6lZ4I)zjp$UcT$^!*3TMr9=c literal 17958 zcmeHP?QUd85p9WoNW8-^`QeF(2ZG=SNJd!BDnyV3IZ6;R*2p^^dma3R$GaP6<)`4C zcoSX&IH${Jy8GVhd*^!WMMB8(&diiRrYUESV)|7#rhvlq_8Rd^ka!!G{44X?uQ z!UE5(@$XF-g>C%mn?<<9h_f&cNAdLvpGV;(#+_mQDQ1kq38-=2K0cj>Pw?*vK7A8@ zj(_K?wMH1j@6W;#lsJEZcgE3=!Vd#~&%y<`Ahlb}<#;|l!MiJbGs4q5e18TWPVuQ6 z{UyF3r{wSmtA2zLpWyQxPfIG4{uGkm;I~P|zN1_>aTaHi+q>}V_<4jG{7F3h4OCvn z@k?m(3|!12kJRQlK40Q>iD&aD!$st87Qdkuk0R}9coRMjQ^ry( zal4pLsZa2Pl7595q&WvKl3@|P!{G%akQnxn^>2fVh!`n#lGRkrqCa~F4 z$UK6SSKxRQWjO-%bG&xYHi*4Nx9?126k z(O%|^;vM5*CwgP4htjU1rB~vO zY+_m@=eb>)c$3EG))zfPvvv7A#_IF%*Ql>?MgO9_4yQT2(%)*lG8(%z)Qxf{u-Dhn zV2Lmb#BkeE$;R~ccP$@Cmb`BvlC+>#TIm*%b%V7_{Lu)#dXmd`XVFh-Eut9nnZ-GC z*#wqJwq~B(TKRmb+pUO1!oU(q#fAB9!g65(^Pb zBzrSENG82*W{XlY?dVIk%6ZzvXFZRQEPAD8_o3+=2}VGD<`T=vHJvlJvc^!2#j1$4 zAT8dn_i>Sj#ZB{-+?nU(NK4MPVqBHpb&sdrbbX)Is+-x@w_gow{n68h*qme$X!84< zmp+rk7X+YnZY7P?PLsQSRsNnm?twl>3_3z?5TDZ`=_z^(L%uK2Q35ZnQ6T zLHr}nrip!MGu1VgwJdr{-PHB)_aFQ-`~#9-M-9J$O^MLlD_FEE7E4cPj=qmMB(E*I z=O>@e=hQV>#&DXQbCu&_{(iIicMe^!9?%|{yUr!F@AE*ZJ?~`xC51O}&u0{uQODZ7 z9>i$8iFyc*jyRrti8rom9V63Rn~#H)?agL>z8i+^THjVbi@r=B zRd0?cuQ9f&)lPnu;Ys)bsJ%cZiLzO*X#^x-@8Bq|!(P=no+$TgwI)a0A2Z&}N`D3> zejfgS&$i!x4K8i>!!hsh-gdx5&uRQ2UTR&-B=-D1+`DyyMF}I8D_Xz9_V3&WSq#pi zx0uz8zE!%cWEjJgYP?!?j8DeBH{VBL`M+k+HmtSuIp^S@Sj+bg)k~YN=GoeNW_qk) zc{A%%I?6YO{@z0CeLVo9J_i00jk)(div1|+x++VRnL?mm$|ju*`L)Wd)DkS&hcZF_-*gnvY-5k)J#j9yQx=* z`g-2+DRS8#v1YeKL%k=L=&60fi+dtu>PIDy%g5&Lht<@?y*#H_--`Ww;`MEe=R0_T z@L+=Ixj~*`Bv{5s-C+Cd$G26i&HP2#nrqQV414lgIGr1e&=uT|8EdkNf9|Nb57-57 z`I&$aN}kaEejl^)9rB0pQvT!`mDic%mN)GE6ziGPJa^*B%un&_=X6B`V~M;r`_zmM z%W7Ijm_(}SWQr@4BYlmFYhp9CqKqfbm*AL4#y&%P1z!fyG+B63bcxw%1^NQd1wQWS zJBp6%Z#qqM4nW}$xZC8Bx;+R#>*-L4dRf6y=XrOt^hN6{E1%T;NgP`y(5E`uix<6e zMv7+5_qp-#nA;Yd$NkyY0<7Vo+WW8L~d#&N9cDCnV}vF z{l1?|_!N1J$j;M-cGlu9BUM7gWS!Mf#$Cw5lUU+_o^P7x#7nL-WiqRyeSYfs$fDAp zH3cfOhmyL6slwBaAXU9J8aC2u0`cFSZAu#tp_|)YxTE!%{chp0uV5RZ4Od}yNay|R zm2ZW=;OPVz;t1!{-y=_KhhO3MF#H60gWs{PIKnrys?p|CTK@vG=@XV6{p=@wb3ZkU zl#Hi-jYxOKeOoG?LeN^gOEE#L`|C?9Wy}xBYt&ofCEcMhKTOfBG4)s$FQvV6`kp;) z|B~yj+nzOBZfErnEoeEC?<`BMYs)r2Lv7in@Z7dkHp_M0#+2u`rDxyLGrep?M1B3x zr&3>`%t993^1zy87N>T$!Tr249PQ7$SX_fUPCt@LueGknc0?0a^USv84j`9xSYERI z8qte;=~TtHZ!5J=`)I@chlsUZhTTkJ4uAY<#TNQ~)(g38?U%laI!L$IwklPf<)poc 
z!zLSd7eae=VeHPLRo3gTNsC)PwEJiBb7`P29}Zc`&Jn)AdaR22Vn3Z#_;>i;$IU#R zgziDp>0BfCuu*1PyOox1FqT^FyB9jG#*VC9-c&Ih@4g4e-Ew{gv;GY>X6Ewll^@rZ zp@l0xr|Vm}b|U3rSSeYGg}j1Ho}F-jBg>uzd$o_3u3h(SF7GbTv{?-|Z>+SdP3~ma z`=d?hh3&hbsv&Z8)ic#v>vl+GbL*h6gKt&VI97FIR6W)?t=&b`y6bfoYxmNcTCgna z`(fKs_o!b+=x66QYN**-pOK^W|9IT&9+SyyO_>vFx?_sAo)NAT8 z^`UZ!pnnQUcM-OgQIk7<|DV)EB|ouVqR-)-*ZSwxjADz;+fI7zlU$$PI+Ygt zE!Hc^aoE?e?A)J!)2XjCYdc2%P0U%uF58Q=SJtVC=H<0nt>(&OF!7joQD~EXge~{D zWk;=^JL+-fBPip7Suo|g^)mkOQ@~mT`h0m>EPDma(CO}EZW%_JcK3bJX1J1a{!Vdc zc^>z9_E$2#Z(2V=vN768A^)64G^-7bEcIXSppk0I(akK&zxv|Y+$-2pQAGZUF?>?) z877?=>S4(aXz*OWM3`86vT0r~K2d`X>;?xvgDDMi3eVZVL~ZM%|g zX;sgkOV7_oJVoxVRUP#BVHvKDDWtXgV2wNi|5tOjI;riPy$_4$JC9WH4$I=*RetjP z@+QuYA8u|j-frRv&SP%k^k-6@4oU|#Udr3)O$1fs=KPoULfe16#XatqK-TkE(QHNh zs7uMaS=OcVC=JQl-P)XmZ@~!>->b56+Kk$1f+u9+`V=Bd|pWv)P#ccEL*RgJWZ+dAcSU@it)u-iRZq$|MU*1*M|kR-{PHF?kBrYIN^qW z``P1k{mM@oZ`OOlkJKCMt^oUWv-7Y!WPNG><3wA3-L+HGk5Wb{vxa1{YORTY-D}t! zyVH^4?EO}Fv?zZ+fNxlpWPf>XCoRm>?DO;Q*3#c=PWVrskAltHbyP~X9?@Z>h!8_r){3T Xj{e%c7iux>$ewPcwl#dsTUPiFXesJc diff --git a/python/artifact-searcher/artifact_searcher/test_artifact.py b/python/artifact-searcher/artifact_searcher/test_artifact.py index 3f09b212b..eb7c6fa6b 100644 --- a/python/artifact-searcher/artifact_searcher/test_artifact.py +++ b/python/artifact-searcher/artifact_searcher/test_artifact.py @@ -88,3 +88,44 @@ def mock_get(url, *args, **kwargs): sample_url = f"{base_url.rstrip('/repository/')}{index_path}repo/com/example/app/1.0.0-SNAPSHOT/app-1.0.0-20240702.123456-1.json" assert full_url == sample_url, f"expected: {sample_url}, received: {full_url}" + +async def test_v2_registry_routes_to_cloud_auth(monkeypatch): + auth_cfg = models.AuthConfig( + credentials_id="aws-creds", + provider="aws", + auth_method="secret", + aws_domain="test-domain", + aws_region="us-east-1", + ) + mvn_cfg = models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="https://test.codeartifact.us-east-1.amazonaws.com/maven/repo/", + auth_config="aws-maven", + ) + dcr_cfg = models.DockerConfig() + reg = models.Registry( + name="aws-registry", + maven_config=mvn_cfg, + docker_config=dcr_cfg, + version="2.0", + auth_config={"aws-maven": auth_cfg}, + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=reg, + solution_descriptor=False, + ) + env_creds = {"aws-creds": {"username": "key", "password": "secret"}} + + async def mock_v2_async(*args, **kwargs): + return ("http://url", ("v2_downloaded", "/tmp/artifact.json")) + + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v2_async", mock_v2_async) + + result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", env_creds) + assert result is not None + assert result[1][0] == "v2_downloaded" diff --git a/python/artifact-searcher/artifact_searcher/utils/models.py b/python/artifact-searcher/artifact_searcher/utils/models.py index 9f26ad6ca..03500f864 100644 --- a/python/artifact-searcher/artifact_searcher/utils/models.py +++ b/python/artifact-searcher/artifact_searcher/utils/models.py @@ -25,7 +25,7 @@ class MavenConfig(BaseSchema): repository_domain_name: str = Field(json_schema_extra={"error_message": "Application registry does not define URL"}) snapshot_group: Optional[str] = "" release_group: Optional[str] = "" - auth_config: Optional[str] = None # V2: Reference to authConfig key in Registry.auth_config + auth_config: Optional[str] = None is_nexus: bool = False @@ -55,29 
+55,18 @@ def detect_nexus(self):
 
 
 class AuthConfig(BaseSchema):
-    """RegDef V2 authentication configuration"""
-    credentials_id: str
-    auth_type: Optional[str] = None  # shortLived, longLived
-    provider: Optional[str] = None  # aws, gcp, azure
-    auth_method: Optional[str] = None  # secret, assume_role, service_account, federation, user_pass
-
-    # AWS-specific fields
+    credentials_id: Optional[str] = None
+    auth_type: Optional[str] = None
+    provider: Optional[str] = None
+    auth_method: Optional[str] = None
     aws_region: Optional[str] = None
     aws_domain: Optional[str] = None
     aws_role_arn: Optional[str] = None
     aws_role_session_prefix: Optional[str] = "nc-devops-m2m-session"
-
-    # GCP-specific fields
     gcp_reg_project: Optional[str] = None
     gcp_reg_pool_id: Optional[str] = None
     gcp_reg_provider_id: Optional[str] = None
     gcp_reg_sa_email: Optional[str] = None
-
-    # Azure-specific fields (future support)
-    azure_tenant_id: Optional[str] = None
-    azure_acr_resource: Optional[str] = None
-    azure_acr_name: Optional[str] = None
-    azure_artifacts_resource: Optional[str] = None
 
 
 class DockerConfig(BaseSchema):
@@ -132,7 +121,7 @@ class ArtifactInfo(BaseSchema):
 
 
 class Registry(BaseSchema):
-    credentials_id: Optional[str] = ""  # V1 backward compatibility
+    credentials_id: Optional[str] = ""
     name: str
     maven_config: MavenConfig
     docker_config: DockerConfig
@@ -141,10 +130,8 @@ class Registry(BaseSchema):
     npm_config: Optional[NpmConfig] = None
     helm_config: Optional[HelmConfig] = None
     helm_app_config: Optional[HelmAppConfig] = None
-
-    # V2 fields
-    version: Optional[str] = "1.0"  # Default "1.0" for backward compatibility
-    auth_config: Optional[dict[str, AuthConfig]] = None  # V2: Dictionary of named auth configurations
+    version: Optional[str] = "1.0"
+    auth_config: Optional[dict[str, AuthConfig]] = None
 
 
 class Application(BaseSchema):
diff --git a/python/artifact-searcher/tests/__init__.py b/python/artifact-searcher/tests/__init__.py
deleted file mode 100644
index f670b2f318c8e350e244719601394a5e4f71fa71..0000000000000000000000000000000000000000
GIT binary patch
(base85-encoded binary delta omitted; 78-byte file deleted)
diff --git a/python/artifact-searcher/tests/test_cloud_auth_helper.py b/python/artifact-searcher/tests/test_cloud_auth_helper.py
deleted file mode 100644
index fdd1eb55978259ee27b4512b423768a5c9695f96..0000000000000000000000000000000000000000
GIT binary patch
(base85-encoded binary delta omitted; 11300-byte file deleted)
diff --git a/python/artifact-searcher/tests/test_models_v2.py b/python/artifact-searcher/tests/test_models_v2.py
deleted file mode 100644
index a6e2cbd96e56ea90ff6424deafc1a287a306f29d..0000000000000000000000000000000000000000
GIT binary patch
(base85-encoded binary delta omitted; 11450-byte file deleted)
diff --git a/python/artifact-searcher/tests/test_version_routing.py b/python/artifact-searcher/tests/test_version_routing.py
deleted file mode 100644
index 21f27b0a27e101235c83aa6b4ab159c4e7efad35..0000000000000000000000000000000000000000
GIT binary patch
(base85-encoded binary delta omitted; 34482-byte file deleted)
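A gloss on the slimmed-down models above: a RegDef V2 file carries a dictionary of named authentication configurations, each of which maps onto AuthConfig. The sketch below shows that mapping with simplified pydantic stand-ins rather than the real BaseSchema; every key name and value in it is invented for illustration.

from typing import Optional
from pydantic import BaseModel

class AuthConfigSketch(BaseModel):
    # Mirrors the fields kept in AuthConfig above; camelCase aliasing omitted.
    credentials_id: Optional[str] = None
    auth_type: Optional[str] = None
    provider: Optional[str] = None
    auth_method: Optional[str] = None
    aws_region: Optional[str] = None
    aws_role_arn: Optional[str] = None

# Hypothetical auth_config section of a RegDef V2 file, keyed by logical name:
raw = {
    "maven": {
        "credentials_id": "aws-keys",                           # invented
        "provider": "aws",
        "auth_method": "assume_role",
        "aws_region": "eu-west-1",                              # invented
        "aws_role_arn": "arn:aws:iam::123456789012:role/demo",  # invented
    }
}

auth_config = {name: AuthConfigSketch(**cfg) for name, cfg in raw.items()}
assert auth_config["maven"].provider == "aws"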
From 58f97aff6f414f19d64943b5b536cea7f59d1acb Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Sun, 14 Dec 2025 12:46:55 +0530
Subject: [PATCH 04/48] fix: minor V2 issues: allow SNAPSHOT solution
 descriptors and read V2-downloaded files locally

---
 .../artifact_searcher/utils/models.py |  2 +-
 scripts/build_env/handle_sd.py        | 10 +++++++---
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/utils/models.py b/python/artifact-searcher/artifact_searcher/utils/models.py
index 03500f864..8892d020e 100644
--- a/python/artifact-searcher/artifact_searcher/utils/models.py
+++ b/python/artifact-searcher/artifact_searcher/utils/models.py
@@ -1,9 +1,9 @@
 from enum import Enum
 from typing import Optional
+import requests
 
 from pydantic import BaseModel, ConfigDict, field_validator, Field, model_validator
 from pydantic.alias_generators import to_camel
-import requests
 
 from artifact_searcher.utils.constants import DEFAULT_REQUEST_TIMEOUT
 
diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py
index fc1d29ae7..03955c75a 100644
--- a/scripts/build_env/handle_sd.py
+++ b/scripts/build_env/handle_sd.py
@@ -334,8 +334,6 @@ def _get_environment_credentials() -> dict:
 
 
 def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine) -> dict[str, object]:
-    if 'SNAPSHOT' in version:
-        raise ValueError("SNAPSHOT is not supported version of Solution Descriptor artifacts")
     app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins)
 
     env_creds = _get_environment_credentials()
@@ -343,7 +341,13 @@ def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine) ->
     if not artifact_info:
         raise ValueError(
             f'Solution descriptor content was not received for {app_name}:{version}')
-    sd_url, _ = artifact_info
+    sd_url, mvn_repo = artifact_info
+    mvn_repo_value, mvn_repo_extra = mvn_repo
+
+    if mvn_repo_value == "v2_downloaded":
+        logger.debug(f"Reading V2 solution descriptor from local file: {mvn_repo_extra}")
+        with open(mvn_repo_extra, 'r') as f:
+            return json.load(f)
 
     return artifact.download_json_content(sd_url)
 
From e01841418c9c112d6c1f71cbd1d5b45fc8f8f6de Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Sun, 14 Dec 2025 13:10:14 +0530
Subject: [PATCH 05/48] fix: support V2 with null or empty auth_config for
 Artifactory/Nexus

---
 .../artifact-searcher/artifact_searcher/artifact.py | 12 ++++++++++--
 .../artifact_searcher/cloud_auth_helper.py          | 11 ++++++-----
 2 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index 222fd6d8c..62e760771 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -269,7 +269,7 @@ async def check_artifact_async(
 
 async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str,
                                    env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]:
-    if not env_creds or not getattr(app.registry.maven_config, 'auth_config', None):
+    if not getattr(app.registry.maven_config, 'auth_config', None):
         return await _check_artifact_v1_async(app, artifact_extension, version)
 
     try:
@@ -282,6 +282,14 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt
         if not auth_config or auth_config.provider not in ["aws", "gcp", "artifactory", "nexus"]:
             return await _check_artifact_v1_async(app, artifact_extension, version)
 
+        # AWS and GCP require credentials; Artifactory/Nexus can work with anonymous access
+        if auth_config.provider in ["aws", "gcp"] and not env_creds:
+            logger.warning(f"V2 {auth_config.provider} requires credentials but env_creds is empty")
+            return await _check_artifact_v1_async(app, artifact_extension, version)
+        if auth_config.provider in ["aws", "gcp"] and auth_config.credentials_id and auth_config.credentials_id not in (env_creds or {}):
+            logger.warning(f"V2 {auth_config.provider} credentials '{auth_config.credentials_id}' not found in env_creds")
+            return await _check_artifact_v1_async(app, artifact_extension, version)
+
         logger.info(f"V2 search for {app.name} with provider={auth_config.provider}")
 
         loop = asyncio.get_running_loop()
@@ -362,7 +370,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt
 
 
 async def _v2_download_with_fallback(searcher, url: str, local_path: str, auth_config,
-                                     registry: Registry, env_creds: dict) -> bool:
+                                     registry: Registry, env_creds: Optional[dict]) -> bool:
     loop = asyncio.get_running_loop()
 
     try:
diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
index 952340d68..5af4f331c 100644
--- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
+++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
@@ -49,11 +49,11 @@ def resolve_auth_config(registry: Registry, artifact_type: str = "maven") -> Opt
         return auth_config
 
     @staticmethod
-    def resolve_credentials(auth_config: AuthConfig, env_creds: Dict[str, dict]) -> dict:
+    def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, dict]]) -> dict:
        """Resolve credentials from env_creds based on auth_config.credentials_id."""
         cred_id = auth_config.credentials_id
         if not cred_id:
-            return {}  # Anonymous access
+            return {}  # Anonymous access (Artifactory/Nexus with empty credentialsId)
 
         if not env_creds or cred_id not in env_creds:
             raise KeyError(f"Credential '{cred_id}' not found in env_creds")
@@ -90,7 +90,7 @@ def _extract_repository_name(url: str) -> str:
 
     @staticmethod
     def _extract_region(url: str, auth_config: AuthConfig) -> str:
-        """Extract region from URL or auth_config."""
+        """Extract region from URL or auth_config. Prefers explicit config over URL extraction."""
         if auth_config.provider == "aws" and auth_config.aws_region:
             return auth_config.aws_region
         aws_match = re.search(r'\.([a-z0-9-]+)\.amazonaws\.com', url)
@@ -97,12 +97,13 @@ def _extract_region(url: str, auth_config: AuthConfig) -> str:
         if aws_match:
             return aws_match.group(1)
         gcp_match = re.search(r'([a-z0-9-]+)-maven\.pkg\.dev', url)
         if gcp_match:
             return gcp_match.group(1)
+        logger.warning(f"Could not extract region from URL '{url}', using default 'us-east-1'")
         return "us-east-1"
 
     @staticmethod
-    def create_maven_searcher(registry: Registry, env_creds: Dict[str, dict]) -> 'MavenArtifactSearcher':
+    def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict]]) -> 'MavenArtifactSearcher':
         """Create configured MavenArtifactSearcher for the registry provider."""
         if MavenArtifactSearcher is None:
             raise ImportError("qubership_pipelines_common_library not available")
@@ -194,7 +195,7 @@ def get_gcp_access_token(service_account_json: str) -> Optional[str]:
         return None
 
     @staticmethod
-    def get_gcp_credentials_from_registry(registry: Registry, env_creds: Dict[str, dict]) -> Optional[str]:
+    def get_gcp_credentials_from_registry(registry: Registry, env_creds: Optional[Dict[str, dict]]) -> Optional[str]:
         """Extract GCP service account JSON from registry for token generation."""
         auth_config = CloudAuthHelper.resolve_auth_config(registry, "maven")
         if not auth_config or auth_config.provider != "gcp":
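The net effect of the patch above is a small set of credential rules: AWS and GCP must resolve real credentials before the V2 search runs, while Artifactory and Nexus may proceed anonymously when credentialsId is empty. A compact, runnable restatement of just that decision logic (the function name is ours, not part of the library):

from typing import Optional

def needs_v1_fallback(provider: str, credentials_id: Optional[str],
                      env_creds: Optional[dict]) -> bool:
    if provider in ("aws", "gcp"):
        if not env_creds:
            return True                  # cloud providers cannot be anonymous
        if credentials_id and credentials_id not in env_creds:
            return True                  # the named credential is missing
    return False                         # artifactory/nexus: anonymous is fine

assert needs_v1_fallback("aws", "aws-keys", {}) is True
assert needs_v1_fallback("nexus", None, None) is False
assert needs_v1_fallback("gcp", "gcp-keys", {"gcp-keys": {"secret": "..."}}) is False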
From 2b002fa74fa99326e39f42eae0bcb613ce5f6420 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Sun, 14 Dec 2025 23:34:42 +0530
Subject: [PATCH 06/48] fix: load V2 cloud credentials from credentials.yml
 when environment variables are absent

---
 scripts/build_env/handle_sd.py | 28 ++++++++++++++++++++++++----
 1 file changed, 24 insertions(+), 4 deletions(-)

diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py
index 03955c75a..603301ad5 100644
--- a/scripts/build_env/handle_sd.py
+++ b/scripts/build_env/handle_sd.py
@@ -292,7 +292,7 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
         source_name, version = entry.split(":", 1)
 
         logger.info(f"Starting download of SD: {source_name}-{version}")
-        sd_data = download_sd_by_appver(source_name, version, app_def_getter_plugins)
+        sd_data = download_sd_by_appver(source_name, version, app_def_getter_plugins, env)
 
         sd_data_list.append(sd_data)
 
@@ -300,9 +300,11 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
     extract_sds_from_json(env, base_sd_path, sd_data_json, effective_merge_mode)
 
 
-def _get_environment_credentials() -> dict:
+def _get_environment_credentials(env: Environment = None) -> dict:
     """Get credentials from environment for V2 cloud registry support."""
     env_creds = {}
+
+    # First try environment variables
     aws_access_key = os.getenv("AWS_ACCESS_KEY_ID")
     aws_secret_key = os.getenv("AWS_SECRET_ACCESS_KEY")
     if aws_access_key and aws_secret_key:
@@ -312,6 +314,24 @@ def _get_environment_credentials() -> dict:
         }
         logger.debug("Loaded AWS credentials from environment")
 
+    # If not in environment and env object is provided, try to get from credentials file
+    if not env_creds and env and hasattr(env, 'creds') and
env.creds: + # Handle AWS credentials + if 'aws-keys' in env.creds: + # env.creds['aws-keys'] has structure: {'type': 'usernamePassword', 'data': {'username': '...', 'password': '...'}} + aws_creds = env.creds['aws-keys']['data'] + env_creds["aws-keys"] = { + "username": aws_creds['username'], + "password": aws_creds['password'] + } + logger.debug("Loaded AWS credentials from credentials.yml") + + # Handle GCP credentials + if 'gcp-keys' in env.creds: + gcp_creds = env.creds['gcp-keys']['data']['secret'] + env_creds["gcp-keys"] = {"secret": gcp_creds} + logger.debug("Loaded GCP credentials from credentials.yml") + gcp_sa_json_path = os.getenv("GCP_SA_JSON_PATH") if gcp_sa_json_path and path.exists(gcp_sa_json_path): try: @@ -333,10 +353,10 @@ def _get_environment_credentials() -> dict: return env_creds -def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine) -> dict[str, object]: +def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, env: Environment = None) -> dict[str, object]: app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins) - env_creds = _get_environment_credentials() + env_creds = _get_environment_credentials(env) artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds)) if not artifact_info: raise ValueError( From f7086accf41d980859936c13ae156fa9e2de5c43 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 3 Dec 2025 01:00:03 +0530 Subject: [PATCH 07/48] feat: Add RegDef V2 validation with version detection Detect version field in RegDef files and validate against V2 schema when version is 2.0. Use logger for validation messages and move schema paths to constants. --- scripts/build_env/main.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/scripts/build_env/main.py b/scripts/build_env/main.py index bd4707115..960095e10 100644 --- a/scripts/build_env/main.py +++ b/scripts/build_env/main.py @@ -1,4 +1,5 @@ import argparse +import os from envgenehelper import * from envgenehelper.deployer import * @@ -17,6 +18,9 @@ CLOUD_SCHEMA = "schemas/cloud.schema.json" NAMESPACE_SCHEMA = "schemas/namespace.schema.json" ENV_SPECIFIC_RESOURCE_PROFILE_SCHEMA = "schemas/resource-profile.schema.json" +APPDEF_SCHEMA = "schemas/appdef.schema.json" +REGDEF_V1_SCHEMA = "schemas/regdef.schema.json" +REGDEF_V2_SCHEMA = "schemas/regdef-v2.schema.json" def prepare_folders_for_rendering(env_name, cluster_name, source_env_dir, templates_dir, render_dir, @@ -273,15 +277,18 @@ def validate_appregdefs(render_dir, env_name): logger.info(f"No AppDef YAMLs found in {appdef_dir}") for file in appdef_files: logger.info(f"AppDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/appdef.schema.json") + validate_yaml_by_scheme_or_fail(file, APPDEF_SCHEMA) if os.path.exists(regdef_dir): regdef_files = findAllYamlsInDir(regdef_dir) if not regdef_files: logger.info(f"No RegDef YAMLs found in {regdef_dir}") for file in regdef_files: - logger.info(f"RegDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/regdef.schema.json") + logger.info(f"Validating RegDef file: {file}") + regdef_content = openYaml(file) + version = str(regdef_content.get('version', '1.0')) + schema_path = REGDEF_V2_SCHEMA if version != '1.0' else REGDEF_V1_SCHEMA + validate_yaml_by_scheme_or_fail(file, schema_path) def render_environment(env_name, cluster_name, templates_dir, all_instances_dir, output_dir, g_template_version, From e334e4ac84e4b18a07820c881706c5fd80fd9fc9 
Mon Sep 17 00:00:00 2001 From: Siva Reddy Kunduru <35566000+sivareddyit@users.noreply.github.com> Date: Wed, 17 Dec 2025 20:22:39 +0530 Subject: [PATCH 08/48] fix: preserve the input data type while calculating effective set (#882) --- .../effective-set/cleanup/monitoring/parameters.yaml | 6 +++--- .../pl-01/effective-set/cleanup/pg/parameters.yaml | 6 +++--- .../MONITORING/values/deployment-parameters.yaml | 12 ++++++------ .../pg/postgres/values/deployment-parameters.yaml | 12 ++++++------ .../processor/expression/binding/CloudMap.java | 8 ++++---- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/monitoring/parameters.yaml b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/monitoring/parameters.yaml index 6849363a1..89e294958 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/monitoring/parameters.yaml +++ b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/monitoring/parameters.yaml @@ -21,7 +21,7 @@ DBAAS_AGGREGATOR_PASSWORD: pass-placeholder-123 DBAAS_AGGREGATOR_USERNAME: user-placeholder-123 DBAAS_CLUSTER_DBA_CREDENTIALS_PASSWORD: pass-placeholder-123 DBAAS_CLUSTER_DBA_CREDENTIALS_USERNAME: user-placeholder-123 -DBAAS_ENABLED: 'true' +DBAAS_ENABLED: true DEFAULT_TENANT_ADMIN_LOGIN: admin DEFAULT_TENANT_ADMIN_PASSWORD: password DEFAULT_TENANT_NAME: tenant @@ -38,7 +38,7 @@ GRAFANA_UI_URL: https://cluster-01.qubership.org GRAYLOG_UI_URL: https://cluster-01.qubership.org MAAS_CREDENTIALS_PASSWORD: pass-placeholder-123 MAAS_CREDENTIALS_USERNAME: user-placeholder-123 -MAAS_ENABLED: 'true' +MAAS_ENABLED: true MAAS_EXTERNAL_ROUTE: http://maas.cluster-01.qubership.org MAAS_INTERNAL_ADDRESS: http://maas.maas:8080 MAAS_SERVICE_ADDRESS: http://maas.cluster-01.qubership.org @@ -55,7 +55,7 @@ PARAM_2: value-2 PARAM_6: value-6 PRIVATE_GATEWAY_URL: https://private-gateway-pl-01-monitoring.cluster-01.qubership.org PRIVATE_IDENTITY_PROVIDER_URL: https://private-gateway-pl-01-monitoring.cluster-01.qubership.org -PRODUCTION_MODE: 'false' +PRODUCTION_MODE: false PUBLIC_GATEWAY_URL: https://public-gateway-pl-01-monitoring.cluster-01.qubership.org PUBLIC_IDENTITY_PROVIDER_URL: https://public-gateway-pl-01-monitoring.cluster-01.qubership.org SERVER_HOSTNAME: cluster-01.qubership.org diff --git a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/pg/parameters.yaml b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/pg/parameters.yaml index aca35d314..7edd43101 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/pg/parameters.yaml +++ b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/cleanup/pg/parameters.yaml @@ -21,7 +21,7 @@ DBAAS_AGGREGATOR_PASSWORD: pass-placeholder-123 DBAAS_AGGREGATOR_USERNAME: user-placeholder-123 DBAAS_CLUSTER_DBA_CREDENTIALS_PASSWORD: pass-placeholder-123 DBAAS_CLUSTER_DBA_CREDENTIALS_USERNAME: user-placeholder-123 -DBAAS_ENABLED: 'true' +DBAAS_ENABLED: true 
DEFAULT_TENANT_ADMIN_LOGIN: admin DEFAULT_TENANT_ADMIN_PASSWORD: password DEFAULT_TENANT_NAME: tenant @@ -36,7 +36,7 @@ GRAFANA_UI_URL: https://cluster-01.qubership.org GRAYLOG_UI_URL: https://cluster-01.qubership.org MAAS_CREDENTIALS_PASSWORD: pass-placeholder-123 MAAS_CREDENTIALS_USERNAME: user-placeholder-123 -MAAS_ENABLED: 'true' +MAAS_ENABLED: true MAAS_EXTERNAL_ROUTE: http://maas.cluster-01.qubership.org MAAS_INTERNAL_ADDRESS: http://maas.maas:8080 MAAS_SERVICE_ADDRESS: http://maas.cluster-01.qubership.org @@ -52,7 +52,7 @@ PARAM_2: value-2 PARAM_6: value-6 PRIVATE_GATEWAY_URL: https://private-gateway-pl-01-pg.cluster-01.qubership.org PRIVATE_IDENTITY_PROVIDER_URL: https://private-gateway-pl-01-pg.cluster-01.qubership.org -PRODUCTION_MODE: 'false' +PRODUCTION_MODE: false PUBLIC_GATEWAY_URL: https://public-gateway-pl-01-pg.cluster-01.qubership.org PUBLIC_IDENTITY_PROVIDER_URL: https://public-gateway-pl-01-pg.cluster-01.qubership.org SERVER_HOSTNAME: cluster-01.qubership.org diff --git a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/monitoring/MONITORING/values/deployment-parameters.yaml b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/monitoring/MONITORING/values/deployment-parameters.yaml index 3cf5897b9..e9f387b25 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/monitoring/MONITORING/values/deployment-parameters.yaml +++ b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/monitoring/MONITORING/values/deployment-parameters.yaml @@ -22,7 +22,7 @@ CONSUL_PUBLIC_URL: http://consul.consul:8080 CONSUL_URL: http://consul.consul:8080 CUSTOM_HOST: cluster-01.qubership.org DBAAS_AGGREGATOR_ADDRESS: https://dbaas.cluster-01.qubership.org -DBAAS_ENABLED: 'true' +DBAAS_ENABLED: true DEFAULT_TENANT_ADMIN_LOGIN: admin DEFAULT_TENANT_ADMIN_PASSWORD: password DEFAULT_TENANT_NAME: tenant @@ -38,7 +38,7 @@ ESCAPE_SEQUENCE: 'true' GATEWAY_URL: http://internal-gateway-service:8080 GRAFANA_UI_URL: https://cluster-01.qubership.org GRAYLOG_UI_URL: https://cluster-01.qubership.org -MAAS_ENABLED: 'true' +MAAS_ENABLED: true MAAS_EXTERNAL_ROUTE: http://maas.cluster-01.qubership.org MAAS_INTERNAL_ADDRESS: http://maas.maas:8080 MAAS_SERVICE_ADDRESS: http://maas.cluster-01.qubership.org @@ -55,7 +55,7 @@ PARAM_2: value-2 PARAM_6: value-6 PRIVATE_GATEWAY_URL: https://private-gateway-pl-01-monitoring.cluster-01.qubership.org PRIVATE_IDENTITY_PROVIDER_URL: https://private-gateway-pl-01-monitoring.cluster-01.qubership.org -PRODUCTION_MODE: 'false' +PRODUCTION_MODE: false PUBLIC_GATEWAY_URL: https://public-gateway-pl-01-monitoring.cluster-01.qubership.org PUBLIC_IDENTITY_PROVIDER_URL: https://public-gateway-pl-01-monitoring.cluster-01.qubership.org SERVER_HOSTNAME: cluster-01.qubership.org @@ -103,7 +103,7 @@ global: &id002 CONSUL_URL: http://consul.consul:8080 CUSTOM_HOST: cluster-01.qubership.org DBAAS_AGGREGATOR_ADDRESS: https://dbaas.cluster-01.qubership.org - DBAAS_ENABLED: 'true' + DBAAS_ENABLED: true DEFAULT_TENANT_ADMIN_LOGIN: admin DEFAULT_TENANT_ADMIN_PASSWORD: password DEFAULT_TENANT_NAME: tenant @@ -119,7 +119,7 @@ global: &id002 GATEWAY_URL: http://internal-gateway-service:8080 GRAFANA_UI_URL: https://cluster-01.qubership.org GRAYLOG_UI_URL: 
https://cluster-01.qubership.org - MAAS_ENABLED: 'true' + MAAS_ENABLED: true MAAS_EXTERNAL_ROUTE: http://maas.cluster-01.qubership.org MAAS_INTERNAL_ADDRESS: http://maas.maas:8080 MAAS_SERVICE_ADDRESS: http://maas.cluster-01.qubership.org @@ -136,7 +136,7 @@ global: &id002 PARAM_6: value-6 PRIVATE_GATEWAY_URL: https://private-gateway-pl-01-monitoring.cluster-01.qubership.org PRIVATE_IDENTITY_PROVIDER_URL: https://private-gateway-pl-01-monitoring.cluster-01.qubership.org - PRODUCTION_MODE: 'false' + PRODUCTION_MODE: false PUBLIC_GATEWAY_URL: https://public-gateway-pl-01-monitoring.cluster-01.qubership.org PUBLIC_IDENTITY_PROVIDER_URL: https://public-gateway-pl-01-monitoring.cluster-01.qubership.org SERVER_HOSTNAME: cluster-01.qubership.org diff --git a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/pg/postgres/values/deployment-parameters.yaml b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/pg/postgres/values/deployment-parameters.yaml index 304a5914e..08fcb3377 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/pg/postgres/values/deployment-parameters.yaml +++ b/build_effective_set_generator_java/effective-set-generator/src/test/resources/environments/cluster-01/pl-01/effective-set/deployment/pg/postgres/values/deployment-parameters.yaml @@ -22,7 +22,7 @@ CONSUL_PUBLIC_URL: http://consul.consul:8080 CONSUL_URL: http://consul.consul:8080 CUSTOM_HOST: cluster-01.qubership.org DBAAS_AGGREGATOR_ADDRESS: https://dbaas.cluster-01.qubership.org -DBAAS_ENABLED: 'true' +DBAAS_ENABLED: true DEFAULT_TENANT_ADMIN_LOGIN: admin DEFAULT_TENANT_ADMIN_PASSWORD: password DEFAULT_TENANT_NAME: tenant @@ -36,7 +36,7 @@ ESCAPE_SEQUENCE: 'true' GATEWAY_URL: http://internal-gateway-service:8080 GRAFANA_UI_URL: https://cluster-01.qubership.org GRAYLOG_UI_URL: https://cluster-01.qubership.org -MAAS_ENABLED: 'true' +MAAS_ENABLED: true MAAS_EXTERNAL_ROUTE: http://maas.cluster-01.qubership.org MAAS_INTERNAL_ADDRESS: http://maas.maas:8080 MAAS_SERVICE_ADDRESS: http://maas.cluster-01.qubership.org @@ -52,7 +52,7 @@ PARAM_2: value-2 PARAM_6: value-6 PRIVATE_GATEWAY_URL: https://private-gateway-pl-01-pg.cluster-01.qubership.org PRIVATE_IDENTITY_PROVIDER_URL: https://private-gateway-pl-01-pg.cluster-01.qubership.org -PRODUCTION_MODE: 'false' +PRODUCTION_MODE: false PUBLIC_GATEWAY_URL: https://public-gateway-pl-01-pg.cluster-01.qubership.org PUBLIC_IDENTITY_PROVIDER_URL: https://public-gateway-pl-01-pg.cluster-01.qubership.org SERVER_HOSTNAME: cluster-01.qubership.org @@ -95,7 +95,7 @@ pg_patroni: &id001 CONSUL_URL: http://consul.consul:8080 CUSTOM_HOST: cluster-01.qubership.org DBAAS_AGGREGATOR_ADDRESS: https://dbaas.cluster-01.qubership.org - DBAAS_ENABLED: 'true' + DBAAS_ENABLED: true DEFAULT_TENANT_ADMIN_LOGIN: admin DEFAULT_TENANT_ADMIN_PASSWORD: password DEFAULT_TENANT_NAME: tenant @@ -109,7 +109,7 @@ pg_patroni: &id001 GATEWAY_URL: http://internal-gateway-service:8080 GRAFANA_UI_URL: https://cluster-01.qubership.org GRAYLOG_UI_URL: https://cluster-01.qubership.org - MAAS_ENABLED: 'true' + MAAS_ENABLED: true MAAS_EXTERNAL_ROUTE: http://maas.cluster-01.qubership.org MAAS_INTERNAL_ADDRESS: http://maas.maas:8080 MAAS_SERVICE_ADDRESS: http://maas.cluster-01.qubership.org @@ -125,7 +125,7 @@ pg_patroni: &id001 PARAM_6: value-6 PRIVATE_GATEWAY_URL: 
https://private-gateway-pl-01-pg.cluster-01.qubership.org PRIVATE_IDENTITY_PROVIDER_URL: https://private-gateway-pl-01-pg.cluster-01.qubership.org - PRODUCTION_MODE: 'false' + PRODUCTION_MODE: false PUBLIC_GATEWAY_URL: https://public-gateway-pl-01-pg.cluster-01.qubership.org PUBLIC_IDENTITY_PROVIDER_URL: https://public-gateway-pl-01-pg.cluster-01.qubership.org SERVER_HOSTNAME: cluster-01.qubership.org diff --git a/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/binding/CloudMap.java b/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/binding/CloudMap.java index 20267f393..0fb3c2485 100644 --- a/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/binding/CloudMap.java +++ b/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/binding/CloudMap.java @@ -109,12 +109,12 @@ public Map getMap(String cloudName) { map.putIfAbsent("DBAAS_CLUSTER_DBA_CREDENTIALS_USERNAME", DEFAULT_DBAAS_AGGREGATOR_LOGIN); map.putIfAbsent("DBAAS_CLUSTER_DBA_CREDENTIALS_PASSWORD", DEFAULT_DBAAS_AGGREGATOR_PASSWORD); } - map.putIfAbsent("DBAAS_ENABLED", Boolean.toString(dbaas.isEnable())); + map.putIfAbsent("DBAAS_ENABLED", new Parameter(dbaas.isEnable())); } MaaS maas = config.getMaas(); if (maas != null) { - map.putIfAbsent("MAAS_ENABLED", Boolean.toString(maas.isEnable())); + map.putIfAbsent("MAAS_ENABLED", new Parameter(maas.isEnable())); if (maas.isEnable()) { //Deprecated. For backward compatibility. New name MAAS_EXTERNAL_ROUTE map.put("MAAS_SERVICE_ADDRESS", maas.getMaasUrl()); @@ -131,7 +131,7 @@ public Map getMap(String cloudName) { } } } else { - map.putIfAbsent("MAAS_ENABLED", "false"); + map.putIfAbsent("MAAS_ENABLED", new Parameter(false)); } Vault vaultConfig = config.getVault(); @@ -176,7 +176,7 @@ public Map getMap(String cloudName) { } } - map.put("PRODUCTION_MODE", Boolean.toString(config.isProductionMode())); + map.put("PRODUCTION_MODE", new Parameter(config.isProductionMode())); map.put("namespace", new Parameter(new NamespaceMap(tenant, cloudName, defaultNamespace, defaultApp, binding, originalNamespace).init())); map.put("CLOUDNAME", cloudName); map.put("e2e", new Parameter(e2e)); From 6239069648b530417a2304298bff9509d567fa2c Mon Sep 17 00:00:00 2001 From: "qubership-actions[bot]" Date: Wed, 17 Dec 2025 15:02:13 +0000 Subject: [PATCH 09/48] chore: Update docker image tags and envgene_version for branch main [skip ci] --- .../instance-repo-pipeline/.github/workflows/Envgene.yml | 6 +++--- .../git-system-follower-package/package.yaml | 2 +- .../git-system-follower-package/package.yaml | 2 +- .../scripts/templates/default/cookiecutter.json | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml b/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml index 500158605..37723e3cb 100644 --- a/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml +++ b/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml @@ -69,9 +69,9 @@ env: DOCKER_IMAGE_NAME_EFFECTIVE_SET_GENERATOR: "${{ vars.DOCKER_REGISTRY || 'ghcr.io/netcracker' }}/qubership-effective-set-generator" #DOCKER_IMAGE_TAGS - DOCKER_IMAGE_TAG_PIPEGENE: "1.12.0" - DOCKER_IMAGE_TAG_ENVGENE: "1.12.0" - DOCKER_IMAGE_TAG_EFFECTIVE_SET_GENERATOR: 
"1.12.0" + DOCKER_IMAGE_TAG_PIPEGENE: "1.12.1" + DOCKER_IMAGE_TAG_ENVGENE: "1.12.1" + DOCKER_IMAGE_TAG_EFFECTIVE_SET_GENERATOR: "1.12.1" jobs: process_environment_variables: diff --git a/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml b/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml index 7fdcebe38..448d31d1e 100644 --- a/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml +++ b/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml @@ -1,5 +1,5 @@ apiVersion: v1 type: gitlab-ci-pipeline name: envgene_discovery_project -version: 1.12.0 +version: 1.12.1 dependencies: [] diff --git a/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml b/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml index b9fda893a..2524e20b1 100644 --- a/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml +++ b/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml @@ -1,5 +1,5 @@ apiVersion: v1 type: gitlab-ci-pipeline name: envgene_instance_project -version: 1.12.0 +version: 1.12.1 dependencies: [] diff --git a/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json b/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json index b7768fb78..b7753f3fa 100644 --- a/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json +++ b/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json @@ -2,7 +2,7 @@ "gsf_repository_name": "envgene_instance_project", "docker_registry": "ghcr.io", "docker_namespace": "netcracker", - "envgene_version": "1.12.0", + "envgene_version": "1.12.1", "envgen_image": "qubership-envgene", "pipe_image": "qubership-pipegene", "cloud_deploytool_image": "env-generator-deploytool_build_deploytool", From 1f19356168fec36833d8ed98c1be1a8aa42958b8 Mon Sep 17 00:00:00 2001 From: Dias <120464230+dysmon@users.noreply.github.com> Date: Thu, 18 Dec 2025 14:47:15 +0500 Subject: [PATCH 10/48] feat: unify logs (#868) * chore: trigger pipeline * feat: centralise logs * fix: delete needless dict from pipeline parameters * fix: remove formatting and regex * feat: log_pipe_params file added * feat: add dependency * feat: add dependency * feat: add dependency * feat: add dependency * feat: add dependency * fix: delete extra attributes * feat: add script to dockerfile * feat: centralise logs * feat: centralise logs * fix: update java version * feat: test * feat: test-2 * chore: Update package files [skip ci] * feat: test-3 * chore: trigger pipeline * feat: trigger pipeline * feat: update exception handle * feat: update exception handle * feat: centralise logs * feat: delete extra lines * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: logging updated * feat: logging updated * feat: centralise logs * feat: centralise logs * feat: centralise logs * feat: centralise logs --------- 
Co-authored-by: github-actions[bot] --- base_modules/scripts/decrypt_fernet.py | 18 +++--- base_modules/scripts/logging_functions.sh | 18 ------ .../ansible/library/maven_artifact.py | 1 - .../build/Dockerfile | 1 + build_effective_set_generator_java/Dockerfile | 3 + .../commons/pom.xml | 4 +- .../devops/commons/utils/ConsoleLogger.java | 35 ++++++++---- .../effective-set-generator/pom.xml | 4 +- .../qubership/cloud/devops/cli/CmdbCli.java | 6 +- .../devops/cli/logger/LogLevelMapper.java | 24 ++++++++ .../devops/cli/logger/LoggingInitializer.java | 19 +++++++ .../devops/cli/parser/CliParameterParser.java | 4 +- .../implementation/FileDataConverterImpl.java | 2 +- .../ApplicationServiceCliImpl.java | 3 +- .../src/main/resources/application.properties | 7 ++- .../gstring-to-jinjava-translator/pom.xml | 4 +- .../parameters-processor/pom.xml | 4 +- .../expression/ExpressionLanguage.java | 6 +- .../ansible/library/maven_artifact.py | 1 - build_envgene/build/Dockerfile | 1 + build_envgene/scripts/report.py | 2 - build_pipegene/build/Dockerfile | 1 + build_pipegene/scripts/github_actions.py | 17 ++---- build_pipegene/scripts/gitlab_ci.py | 6 +- build_pipegene/scripts/main.py | 19 ++----- build_pipegene/scripts/pipeline_helper.py | 8 ++- build_pipegene/scripts/validations.py | 2 +- creds_rotation/build/requirements.txt | 2 +- .../artifact_searcher/artifact.py | 2 +- python/artifact-searcher/pyproject.toml | 1 - python/envgene/envgenehelper/__main__.py | 2 +- python/envgene/envgenehelper/logger.py | 55 ++++++++++--------- scripts/build_env/generate_config_env.py | 6 +- scripts/build_env/handle_sd.py | 12 ++-- scripts/utils/__init__.py | 0 scripts/utils/log_pipe_params.py | 5 ++ .../utils}/pipeline_parameters.py | 49 ++++++++++------- 37 files changed, 202 insertions(+), 152 deletions(-) delete mode 100755 base_modules/scripts/logging_functions.sh create mode 100644 build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java create mode 100644 build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java create mode 100644 scripts/utils/__init__.py create mode 100644 scripts/utils/log_pipe_params.py rename {build_pipegene/scripts => scripts/utils}/pipeline_parameters.py (56%) diff --git a/base_modules/scripts/decrypt_fernet.py b/base_modules/scripts/decrypt_fernet.py index 4429538fd..86aa61192 100644 --- a/base_modules/scripts/decrypt_fernet.py +++ b/base_modules/scripts/decrypt_fernet.py @@ -1,11 +1,9 @@ -import logging - +from envgenehelper import logger from yaml import safe_load, safe_dump import click from cryptography.fernet import Fernet -logging.basicConfig(format = '%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s', level = logging.INFO) ENCRYPTED_CONST = 'encrypted:AES256_Fernet' @click.group(chain=True) @@ -19,27 +17,27 @@ def cmdb_prepare(): help="Set secret_key for encrypt cred files") def decrypt_file(secret_key, file_path): ''' {getenv('CI_PROJECT_DIR')}/ansible/inventory/group_vars/{getenv('env_name')}/appdeployer_cmdb/Tenants/{getenv('tenant_name')}/Credentials''' - logging.debug('Try to read %s file', file_path) + logger.debug('Try to read %s file', file_path) with open(file_path, mode="r", encoding="utf-8") as sensitive: sensitive_data = safe_load(sensitive) is_encrypted = check_if_file_is_encrypted(sensitive_data) if is_encrypted: if not secret_key: - logging.error(f'Variable "{secret_key}" is not specified') + 
logger.error(f'Variable "{secret_key}" is not specified') exit(1) cipher = Fernet(secret_key) - logging.debug('Try to decrypt data from %s file', file_path) + logger.debug('Try to decrypt data from %s file', file_path) if isinstance(sensitive_data, dict): decrypted_data = decode_sensitive(cipher, sensitive_data) - logging.debug('Try to write data to %s file', file_path) + logger.debug('Try to write data to %s file', file_path) with open(file_path, mode="w") as sensitive: safe_dump(decrypted_data, sensitive, default_flow_style=False) - logging.info('The %s file has been decrypted', file_path) + logger.info('The %s file has been decrypted', file_path) else: - logging.info('The %s is empty or has no dict struct or not encrypted', file_path) + logger.info('The %s is empty or has no dict struct or not encrypted', file_path) else: - logging.info('File is not encrypted') + logger.info('File is not encrypted') def check_if_file_is_encrypted(sensitive_data) -> bool: for key, data in sensitive_data.items(): diff --git a/base_modules/scripts/logging_functions.sh b/base_modules/scripts/logging_functions.sh deleted file mode 100755 index c5e268eb3..000000000 --- a/base_modules/scripts/logging_functions.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env bash - -function log_info() { - echo -e "[\\e[1;94mINFO\\e[0m] $*" -} - -function log_warn() { - echo -e "[\\e[1;93mWARN\\e[0m] $*" -} - -function log_error() { - echo -e "[\\e[1;91mERROR\\e[0m] $*" -} - -function fail() { - log_error "$*" - exit 1 -} diff --git a/build_effective_set_generator/ansible/library/maven_artifact.py b/build_effective_set_generator/ansible/library/maven_artifact.py index 820d97f81..26072f0c1 100644 --- a/build_effective_set_generator/ansible/library/maven_artifact.py +++ b/build_effective_set_generator/ansible/library/maven_artifact.py @@ -1,4 +1,3 @@ -import logging import os import posixpath import io diff --git a/build_effective_set_generator/build/Dockerfile b/build_effective_set_generator/build/Dockerfile index 6a0652b32..cafc007cf 100644 --- a/build_effective_set_generator/build/Dockerfile +++ b/build_effective_set_generator/build/Dockerfile @@ -13,6 +13,7 @@ COPY build_effective_set_generator/ansible /module/ansible COPY python /python COPY schemas /module/schemas COPY python/integration /python/integration +COPY scripts/utils /module/scripts/utils ENV ANSIBLE_LIBRARY=/module/ansible/library:$ANSIBLE_LIBRARY RUN set -eux; \ diff --git a/build_effective_set_generator_java/Dockerfile b/build_effective_set_generator_java/Dockerfile index 140d7d7e1..c582674df 100644 --- a/build_effective_set_generator_java/Dockerfile +++ b/build_effective_set_generator_java/Dockerfile @@ -20,6 +20,9 @@ ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' COPY --from=builder --chown=1001:root /deployments /deployments COPY --from=builder /etc/alternatives/jre/lib/security/java.security /etc/alternatives/jre/lib/security/java.security +## NOTE: This script requires Python and will fail unless Python is added in the future. 
+COPY scripts/utils /module/scripts/utils
+
 RUN chmod g+rwX /deployments
 
 # Ensure sane permissions on copied tree without findutils
diff --git a/build_effective_set_generator_java/commons/pom.xml b/build_effective_set_generator_java/commons/pom.xml
index 4e55a0646..5996b4c2f 100644
--- a/build_effective_set_generator_java/commons/pom.xml
+++ b/build_effective_set_generator_java/commons/pom.xml
@@ -17,8 +17,8 @@
 
 
     <properties>
-        <maven.compiler.source>11</maven.compiler.source>
-        <maven.compiler.target>11</maven.compiler.target>
+        <maven.compiler.source>17</maven.compiler.source>
+        <maven.compiler.target>17</maven.compiler.target>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     </properties>
diff --git a/build_effective_set_generator_java/commons/src/main/java/org/qubership/cloud/devops/commons/utils/ConsoleLogger.java b/build_effective_set_generator_java/commons/src/main/java/org/qubership/cloud/devops/commons/utils/ConsoleLogger.java
index a9e3d364c..09ead2fbd 100644
--- a/build_effective_set_generator_java/commons/src/main/java/org/qubership/cloud/devops/commons/utils/ConsoleLogger.java
+++ b/build_effective_set_generator_java/commons/src/main/java/org/qubership/cloud/devops/commons/utils/ConsoleLogger.java
@@ -17,29 +17,44 @@
 package org.qubership.cloud.devops.commons.utils;
 
 import lombok.extern.slf4j.Slf4j;
+import java.util.Date;
 
 @Slf4j
 public class ConsoleLogger {
-    private static final String RED = "\u001B[31m"; // Red color for errors
-    private static final String YELLOW = "\u001B[33m"; // Yellow for warnings
-    private static final String GREEN = "\u001B[32m"; // Green for success
-    private static final String RESET = "\u001B[0m"; // Reset color
+    private static final String BLUE = "\u001B[34;20m";
+    private static final String WHITE = "\u001B[97;20m";
+    private static final String YELLOW = "\u001B[33;20m";
+    private static final String RED = "\u001B[31;20m";
+    private static final String RESET = "\u001B[0m";
 
     public static void logError(String message) {
-        log.error(RED + "ERROR: " + message + RESET);
+        log.error(formatMessage("ERROR", message, RED));
     }
 
     public static void logWarning(String message) {
-        log.warn(YELLOW + "WARNING: " + message + RESET);
+        log.warn(formatMessage("WARNING", message, YELLOW));
     }
 
-    public static void logSuccess(String message) {
-        log.info(GREEN + "SUCCESS: " + message + RESET);
+    public static void logInfo(String message) {
+        log.info(formatMessage("INFO", message, WHITE));
     }
 
-    public static void logInfo(String message) {
-        log.info(RESET + "INFO: " + message + RESET);
+    public static void logDebug(String message) {
+        log.debug(formatMessage("DEBUG", message, BLUE));
+    }
+
+    private static String formatMessage(String level, String message, String color) {
+        StackTraceElement ste = Thread.currentThread().getStackTrace()[3];
+        String timestamp = String.format("%tF %<tT", new Date());
+        return color + timestamp + " [" + level + "] " + message
+                + " [" + ste.getFileName() + ":" + ste.getLineNumber() + "]" + RESET;
     }
 }
diff --git a/build_effective_set_generator_java/effective-set-generator/pom.xml b/build_effective_set_generator_java/effective-set-generator/pom.xml
--- a/build_effective_set_generator_java/effective-set-generator/pom.xml
+++ b/build_effective_set_generator_java/effective-set-generator/pom.xml
@@ -21,8 +21,8 @@
     <properties>
         <compiler-plugin.version>3.8.1</compiler-plugin.version>
        <maven.compiler.parameters>true</maven.compiler.parameters>
-        <maven.compiler.source>11</maven.compiler.source>
-        <maven.compiler.target>11</maven.compiler.target>
+        <maven.compiler.source>17</maven.compiler.source>
+        <maven.compiler.target>17</maven.compiler.target>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
         <quarkus.version>${quarkus.platform.version}</quarkus.version>
diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/CmdbCli.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/CmdbCli.java
index 050db6b1b..7727a7fef 100644
--- a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/CmdbCli.java
+++ b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/CmdbCli.java
@@ -60,14 +60,14 @@ public Integer call() {
             logInfo("Starting effective set generation");
             fileDataRepository.prepareProcessingEnv();
             parser.generateEffectiveSet();
-            logSuccess("Successfully generated the effective set");
+            logInfo("Successfully generated the effective set");
             Instant end = Instant.now();
             Duration timeElapsed = Duration.between(start, end);
-            logInfo("Total Time taken : " + timeElapsed.toMillis() + " milliseconds");
+            
logInfo("Total Time taken : " + timeElapsed.toMillis() + " milliseconds"); + logInfo("Total Time taken: " + timeElapsed.toMillis() + " milliseconds"); return 0; } catch (Exception e) { logError(String.format(EFFECTIVE_SET_FAILED, e.getMessage())); - log.debug("stack trace {}", ExceptionUtils.getStackTrace(e)); + logDebug(String.format("Stack trace: %s", ExceptionUtils.getStackTrace(e))); return 1; } } diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java new file mode 100644 index 000000000..91db8feef --- /dev/null +++ b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java @@ -0,0 +1,24 @@ +package org.qubership.cloud.devops.cli.logger; + +import jakarta.enterprise.context.ApplicationScoped; +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.jboss.logging.Logger.Level; + +@ApplicationScoped +public class LogLevelMapper { + + @ConfigProperty(name = "LOG_LEVEL", defaultValue = "INFO") + String level; + + public Level getMappedLevel() { + return switch (level.toUpperCase()) { + case "CRITICAL" -> Level.FATAL; + case "ERROR" -> Level.ERROR; + case "WARNING" -> Level.WARN; + case "INFO" -> Level.INFO; + case "DEBUG" -> Level.DEBUG; + case "TRACE" -> Level.TRACE; + default -> Level.INFO; + }; + } +} diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java new file mode 100644 index 000000000..cd4d4c6e0 --- /dev/null +++ b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java @@ -0,0 +1,19 @@ +package org.qubership.cloud.devops.cli.logger; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.annotation.PostConstruct; +import org.jboss.logging.Logger.Level; + +@ApplicationScoped +public class LoggingInitializer { + + @Inject + LogLevelMapper logLevelMapper; + + @PostConstruct + public void init() { + Level logLevel = logLevelMapper.getMappedLevel(); + System.setProperty("quarkus.log.level", logLevel.name()); + } +} diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/parser/CliParameterParser.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/parser/CliParameterParser.java index 3b585827b..d8da095b2 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/parser/CliParameterParser.java +++ b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/parser/CliParameterParser.java @@ -141,8 +141,8 @@ private void processAndSaveParameters(Optional solutionDescripto cleanupMappingFileData.put(inputData.getNamespaceDTOMap().get(namespaceName).getName(), cleanupPostFixDir); logInfo("Finished processing of application: " + app.getAppName() + ":" + app.getAppVersion() + " from the namespace " + namespaceName); } catch (Exception e) { - log.debug(String.format(APP_PARSE_ERROR, app.getAppName(), namespaceName, 
e.getMessage())); - log.debug("stack trace for further details: {}", ExceptionUtils.getStackTrace(e)); + logDebug(String.format(APP_PARSE_ERROR, app.getAppName(), namespaceName, e.getMessage())); + logDebug(String.format("Stack trace for further details: %s", ExceptionUtils.getStackTrace(e))); errorList.computeIfAbsent(app.getAppName() + ":" + namespaceName, k -> e.getMessage()); } }); diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/repository/implementation/FileDataConverterImpl.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/repository/implementation/FileDataConverterImpl.java index 534b1fca8..214b92d46 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/repository/implementation/FileDataConverterImpl.java +++ b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/repository/implementation/FileDataConverterImpl.java @@ -75,7 +75,7 @@ public Bom parseSbomFile(File file) { } catch (IOException | IllegalArgumentException e) { if (file.getName().startsWith(CLEANUPER) && e instanceof FileNotFoundException) { - log.error("Issue while reading the file " + e.getMessage()); + logError("Issue while reading the file " + e.getMessage()); return null; } throw new FileParseException(String.format(ExceptionMessage.FILE_READ_ERROR, file.getAbsolutePath(), e.getMessage())); diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/service/implementation/ApplicationServiceCliImpl.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/service/implementation/ApplicationServiceCliImpl.java index d7ed33c60..a9f9eef74 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/service/implementation/ApplicationServiceCliImpl.java +++ b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/service/implementation/ApplicationServiceCliImpl.java @@ -27,6 +27,7 @@ import jakarta.inject.Inject; import java.util.List; +import static org.qubership.cloud.devops.commons.utils.ConsoleLogger.*; import static org.qubership.cloud.devops.commons.exceptions.constant.ExceptionAdditionalInfoMessages.ENTITY_NOT_FOUND; @@ -53,7 +54,7 @@ public Application getByName(String applicationName, String namespace) { CloudDTO cloudDTO = inputData.getCloudDTO(); application = getApplicationLinkDTO(applicationName, cloudDTO.getApplications()); if (application == null) { - log.warn(String.format(ENTITY_NOT_FOUND, "Application")); + logWarning(String.format(ENTITY_NOT_FOUND, "Application")); return null; } } diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties b/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties index 1767e6536..93942c580 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties +++ b/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties @@ -1,3 +1,6 @@ quarkus.package.jar.type=uber-jar -quarkus.console.color=true -quarkus.log.console.format=%d %s%n + +quarkus.log.level=${LOG_LEVEL:INFO} +quarkus.console.color=false +quarkus.log.console.format=%s%n + diff 
--git a/build_effective_set_generator_java/gstring-to-jinjava-translator/pom.xml b/build_effective_set_generator_java/gstring-to-jinjava-translator/pom.xml
index f06b4ac57..f94cd4074 100644
--- a/build_effective_set_generator_java/gstring-to-jinjava-translator/pom.xml
+++ b/build_effective_set_generator_java/gstring-to-jinjava-translator/pom.xml
@@ -9,8 +9,8 @@
     <description>The library that translates text from GString to JinJava format</description>
 
     <properties>
-        <maven.compiler.source>11</maven.compiler.source>
-        <maven.compiler.target>11</maven.compiler.target>
+        <maven.compiler.source>17</maven.compiler.source>
+        <maven.compiler.target>17</maven.compiler.target>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <junit.version>4.13.1</junit.version>
         <maven-shade-plugin.version>3.0.0</maven-shade-plugin.version>
diff --git a/build_effective_set_generator_java/parameters-processor/pom.xml b/build_effective_set_generator_java/parameters-processor/pom.xml
index 8c4597819..f41fa1b0b 100644
--- a/build_effective_set_generator_java/parameters-processor/pom.xml
+++ b/build_effective_set_generator_java/parameters-processor/pom.xml
@@ -14,8 +14,8 @@
     <description>The library that handle parameters processing</description>
 
     <properties>
-        <maven.compiler.source>11</maven.compiler.source>
-        <maven.compiler.target>11</maven.compiler.target>
+        <maven.compiler.source>17</maven.compiler.source>
+        <maven.compiler.target>17</maven.compiler.target>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <junit-jupiter.version>5.9.2</junit-jupiter.version>
 
diff --git a/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/ExpressionLanguage.java b/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/ExpressionLanguage.java
index 1b52d4c7b..cb7a571c0 100644
--- a/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/ExpressionLanguage.java
+++ b/build_effective_set_generator_java/parameters-processor/src/main/java/org/qubership/cloud/parameters/processor/expression/ExpressionLanguage.java
@@ -50,6 +50,8 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
+import static org.qubership.cloud.devops.commons.utils.ConsoleLogger.*;
+
 @Slf4j
 public class ExpressionLanguage extends AbstractLanguage {
 
@@ -222,7 +224,7 @@ private Parameter processValue(Object value, Map<String, Parameter> binding, boo
         try {
             rendered = renderStringByJinJava(strValue, binding, escapeDollar);
         } catch (Exception e) {
-            log.debug(String.format("Parameter {} was not processed by JinJava, hence reverting to Groovy.", strValue));
+            logDebug(String.format("Parameter %s was not processed by JinJava, hence reverting to Groovy.", strValue));
             rendered = renderStringByGroovy(strValue, binding, escapeDollar);
         }
         Object originalValue = this.binding.getTypeCollector().get(rendered); // Object
@@ -471,7 +473,7 @@ public Map<String, Parameter> processParameters(Map<String, Parameter> parameter
             try {
                 processedParams.put(key, processValue(value, this.binding, true));
             } catch (IOException e) {
-                log.error(String.format("Error in processing the parameter key %s and value %s", key, value));
+                logError(String.format("Error in processing the parameter key %s and value %s", key, value));
             }
         });
         return processedParams;
diff --git a/build_envgene/ansible/library/maven_artifact.py b/build_envgene/ansible/library/maven_artifact.py
index 820d97f81..26072f0c1 100644
--- a/build_envgene/ansible/library/maven_artifact.py
+++ b/build_envgene/ansible/library/maven_artifact.py
@@ -1,4 +1,3 @@
-import logging
 import os
 import posixpath
 import io
diff --git a/build_envgene/build/Dockerfile b/build_envgene/build/Dockerfile
index 75d782688..26f325a12 100644
--- a/build_envgene/build/Dockerfile
+++ b/build_envgene/build/Dockerfile
@@ -41,6 +41,7 @@
 COPY scripts/build_env /build_env/scripts/build_env/
 COPY scripts/build_template /build_env/scripts/build_template/
 COPY scripts/cloud_passport/ /cloud_passport/scripts/
 COPY schemas /build_env/schemas
+COPY scripts/utils /module/scripts/utils
 
 ENV ANSIBLE_LIBRARY=/module/ansible/library
 
diff --git a/build_envgene/scripts/report.py
b/build_envgene/scripts/report.py index 66b503369..fcefc9a7c 100644 --- a/build_envgene/scripts/report.py +++ b/build_envgene/scripts/report.py @@ -8,8 +8,6 @@ from os import getenv from requests.packages.urllib3.exceptions import InsecureRequestWarning requests.packages.urllib3.disable_warnings(InsecureRequestWarning) -import logging -logging.basicConfig(format = u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s', level = logging.INFO) with open('archive.yaml', 'r') as file_save: archive=yaml.load(file_save, Loader=yaml.SafeLoader) diff --git a/build_pipegene/build/Dockerfile b/build_pipegene/build/Dockerfile index dcd20dd68..cf2c2c19a 100644 --- a/build_pipegene/build/Dockerfile +++ b/build_pipegene/build/Dockerfile @@ -33,6 +33,7 @@ COPY python /python COPY schemas /module/schemas COPY base_modules/scripts /module/scripts COPY build_pipegene/scripts /module/scripts +COPY scripts/utils /module/scripts/scripts/utils/ COPY build_pipegene/pipegene_plugins /module/scripts/pipegene_plugins # Create virtual environment and install Python packages diff --git a/build_pipegene/scripts/github_actions.py b/build_pipegene/scripts/github_actions.py index 11c2a078f..a38e29c7d 100644 --- a/build_pipegene/scripts/github_actions.py +++ b/build_pipegene/scripts/github_actions.py @@ -1,26 +1,17 @@ import click -import re -from envgenehelper import logger from validations import validate_pipeline -from pipeline_parameters import PipelineParametersHandler +from scripts.utils.pipeline_parameters import PipelineParametersHandler @click.group() def cli(): pass -def prepare_input_params() -> dict: - pipe_params = PipelineParametersHandler() - params_log = ("Input parameters are: ") - params_log += pipe_params.get_params_str() - params_log = re.sub(r'(CRED_ROTATION_PAYLOAD=)[^, ]+', r'\1***', params_log) - logger.info(params_log) - return pipe_params.params - @cli.command("validate_pipeline") def validate_pipeline_command(): - params = prepare_input_params() - validate_pipeline(params) + handler = PipelineParametersHandler() + handler.log_pipeline_params() + validate_pipeline(handler.params) if __name__ == "__main__": cli() diff --git a/build_pipegene/scripts/gitlab_ci.py b/build_pipegene/scripts/gitlab_ci.py index 60e2ccbe5..ad0d93b48 100644 --- a/build_pipegene/scripts/gitlab_ci.py +++ b/build_pipegene/scripts/gitlab_ci.py @@ -62,7 +62,7 @@ def build_pipeline(params: dict): else: cluster_name = get_cluster_name_from_full_name(env) environment_name = get_environment_name_from_full_name(env) - if params['ENV_INVENTORY_GENERATION_PARAMS']['ENV_INVENTORY_INIT']: + if params['ENV_INVENTORY_INIT']: env_definition = None else: env_definition = getEnvDefinition(get_env_instances_dir(environment_name, cluster_name, f"{ci_project_dir}/environments")) @@ -84,8 +84,8 @@ def build_pipeline(params: dict): else: logger.info(f"Generation of cloud passport for environment '{env}' is skipped") - if is_inventory_generation_needed(params['IS_TEMPLATE_TEST'], params['ENV_INVENTORY_GENERATION_PARAMS']): - jobs_map["env_inventory_generation_job"] = prepare_inventory_generation_job(pipeline, env, environment_name, cluster_name, params['ENV_INVENTORY_GENERATION_PARAMS'], tags) + if is_inventory_generation_needed(params['IS_TEMPLATE_TEST'], params): + jobs_map["env_inventory_generation_job"] = prepare_inventory_generation_job(pipeline, env, environment_name, cluster_name, params, tags) else: logger.info(f'Preparing of env inventory generation job for {env} is skipped because we are in template test mode.') diff 
--git a/build_pipegene/scripts/main.py b/build_pipegene/scripts/main.py index 2a106f502..e124be622 100644 --- a/build_pipegene/scripts/main.py +++ b/build_pipegene/scripts/main.py @@ -1,31 +1,22 @@ import click -import re -from envgenehelper import logger from gitlab_ci import build_pipeline from validations import validate_pipeline -from pipeline_parameters import PipelineParametersHandler +from scripts.utils.pipeline_parameters import PipelineParametersHandler @click.group(chain=True) def gcip(): pass -def prepare_input_params() -> dict: - pipe_params = PipelineParametersHandler() - params_log = ("Input parameters are: ") - params_log += pipe_params.get_params_str() - params_log = params_log = re.sub(r"(CRED_ROTATION_PAYLOAD:\s*)\(.*?\)", r"\1***", params_log, flags=re.DOTALL) - logger.info(params_log) - return pipe_params.params - @gcip.command("generate_pipeline") def generate_pipeline(): perform_generation() def perform_generation(): - params = prepare_input_params() - validate_pipeline(params) - build_pipeline(params) + handler = PipelineParametersHandler() + handler.log_pipeline_params() + validate_pipeline(handler.params) + build_pipeline(handler.params) if __name__ == "__main__": gcip() diff --git a/build_pipegene/scripts/pipeline_helper.py b/build_pipegene/scripts/pipeline_helper.py index 2a42cd470..add724241 100644 --- a/build_pipegene/scripts/pipeline_helper.py +++ b/build_pipegene/scripts/pipeline_helper.py @@ -23,12 +23,10 @@ def __init__( timeout: Optional[int] = None ) -> None: super().__init__(name=name, stage=stage, image=image, script=script, variables=variables, needs=needs, tags=tags) - self.script = script self.timeout = timeout def render(self) -> Dict[str, Any]: job_data = super().render() - job_data['script'] = self.script job_data['timeout'] = self.timeout return job_data @@ -45,6 +43,12 @@ def job_instance(params, vars, needs=None, rules=None): ) if 'before_script' in params.keys(): job.prepend_scripts(params['before_script']) + + global_before = [ + 'python /module/scripts/utils/log_pipe_params.py' + ] + job.prepend_scripts(*global_before) + if 'after_script' in params.keys(): job.append_scripts(params['after_script']) if needs is None: diff --git a/build_pipegene/scripts/validations.py b/build_pipegene/scripts/validations.py index 5e7f4e978..e6da51b69 100644 --- a/build_pipegene/scripts/validations.py +++ b/build_pipegene/scripts/validations.py @@ -15,7 +15,7 @@ def validate_pipeline(params: dict): params["ENV_NAMES"], params["GET_PASSPORT"], params["ENV_BUILD"], - params["ENV_INVENTORY_GENERATION_PARAMS"]["ENV_INVENTORY_INIT"], + params["ENV_INVENTORY_INIT"], ) def basic_checks(env_names): diff --git a/creds_rotation/build/requirements.txt b/creds_rotation/build/requirements.txt index c77ed86fc..5c02cea82 100644 --- a/creds_rotation/build/requirements.txt +++ b/creds_rotation/build/requirements.txt @@ -7,4 +7,4 @@ rpds-py==0.17.1 jsonschema-specifications==2023.12.1 cryptography==41.0.3 cffi>=1.15 -aiofiles==23.2.0 +aiofiles==23.2.0 \ No newline at end of file diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index a2dcdcc84..f963f27d4 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -11,7 +11,7 @@ import aiohttp import requests -from loguru import logger +from envgenehelper import logger from requests.auth import HTTPBasicAuth from artifact_searcher.utils.models import Registry, Application, 
FileExtension, Credentials, ArtifactInfo from artifact_searcher.utils.constants import DEFAULT_REQUEST_TIMEOUT diff --git a/python/artifact-searcher/pyproject.toml b/python/artifact-searcher/pyproject.toml index 738f19a0d..131ac4b7c 100644 --- a/python/artifact-searcher/pyproject.toml +++ b/python/artifact-searcher/pyproject.toml @@ -7,7 +7,6 @@ name = "artifact_searcher" version = "0.0.1" requires-python = "~=3.12" dependencies = [ - "loguru~=0.7.3", "pydantic~=2.10.6", "requests~=2.32.3", "deepdiff~=8.0.1", diff --git a/python/envgene/envgenehelper/__main__.py b/python/envgene/envgenehelper/__main__.py index 1f702f840..51c319eff 100644 --- a/python/envgene/envgenehelper/__main__.py +++ b/python/envgene/envgenehelper/__main__.py @@ -8,4 +8,4 @@ def handle_exception(exc_type, exc_value, exc_traceback): else: logger.critical("Uncaught exception:", exc_info=(exc_type, exc_value, exc_traceback)) -sys.excepthook = handle_exception +sys.excepthook = handle_exception \ No newline at end of file diff --git a/python/envgene/envgenehelper/logger.py b/python/envgene/envgenehelper/logger.py index c2b8fc373..2856ad24b 100644 --- a/python/envgene/envgenehelper/logger.py +++ b/python/envgene/envgenehelper/logger.py @@ -1,36 +1,39 @@ -from os import getenv import logging +from os import getenv -class CustomFormatter(logging.Formatter): - grey = "\x1b[38;20m" - yellow = "\x1b[33;20m" - red = "\x1b[31;20m" - bold_red = "\x1b[31;1m" - reset = "\x1b[0m" - format = u'[%(asctime)s] [%(levelname)-8s] %(message)s [%(filename)s:%(lineno)d]' +class CustomFormatter(logging.Formatter): + BLUE = "\x1b[34;20m" + WHITE = "\x1b[97;20m" + YELLOW = "\x1b[33;20m" + RED = "\x1b[31;20m" + BOLD_RED = "\x1b[31;1m" + RESET = "\x1b[0m" + BASE_FMT = "%(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(lineno)d]" - FORMATS = { - logging.DEBUG: grey + format + reset, - logging.INFO: grey + format + reset, - logging.WARNING: yellow + format + reset, - logging.ERROR: red + format + reset, - logging.CRITICAL: bold_red + format + reset - } + def __init__(self): + super().__init__() + self.formatters = { + logging.DEBUG: logging.Formatter(self.BLUE + self.BASE_FMT + self.RESET), + logging.INFO: logging.Formatter(self.WHITE + self.BASE_FMT + self.RESET), + logging.WARNING: logging.Formatter(self.YELLOW + self.BASE_FMT + self.RESET), + logging.ERROR: logging.Formatter(self.RED + self.BASE_FMT + self.RESET), + logging.CRITICAL: logging.Formatter(self.BOLD_RED + self.BASE_FMT + self.RESET), + } def format(self, record): - log_fmt = self.FORMATS.get(record.levelno) - formatter = logging.Formatter(log_fmt) + formatter = self.formatters.get(record.levelno, self.formatters[logging.INFO]) return formatter.format(record) -# create logger with 'spam_application' logger = logging.getLogger("envgene") -logger.setLevel(logging.INFO) -# create console handler with a higher log level -ch = logging.StreamHandler() -# get logging level from env var -log_level_str = getenv('ENVGENE_LOG_LEVEL', 'INFO').upper() +logger.propagate = False + +log_level_str = getenv("LOG_LEVEL", "INFO").upper() log_level = getattr(logging, log_level_str, logging.INFO) -ch.setLevel(log_level) -ch.setFormatter(CustomFormatter()) -logger.addHandler(ch) +logger.setLevel(log_level) + +if not logger.hasHandlers(): + handler = logging.StreamHandler() + handler.setLevel(log_level) + handler.setFormatter(CustomFormatter()) + logger.addHandler(handler) diff --git a/scripts/build_env/generate_config_env.py b/scripts/build_env/generate_config_env.py index 5b927f6ac..74bb9e361 100644 
--- a/scripts/build_env/generate_config_env.py +++ b/scripts/build_env/generate_config_env.py @@ -183,20 +183,20 @@ def render_from_file_to_file(self, src_template_path: str, target_file_path: str template = openFileAsString(src_template_path) template = replace_ansible_stuff(template_str=template, template_path=src_template_path) rendered = create_jinja_env().from_string(template).render(self.ctx.as_dict()) - logger.info(f"Rendered entity: \n {rendered}") + logger.debug(f"Rendered entity: \n{rendered}") writeYamlToFile(target_file_path, readYaml(escaping_quotation(rendered))) def render_from_file_to_obj(self, src_template_path) -> dict: template = openFileAsString(src_template_path) template = replace_ansible_stuff(template_str=template, template_path=src_template_path) rendered = create_jinja_env().from_string(template).render(self.ctx.as_dict()) - logger.info(f"Rendered entity: \n {rendered}") + logger.debug(f"Rendered entity: \n{rendered}") return readYaml(escaping_quotation(rendered)) def render_from_obj_to_file(self, template, target_file_path): template = replace_ansible_stuff(template_str=dumpYamlToStr(template)) rendered = create_jinja_env().from_string(template).render(self.ctx.as_dict()) - logger.info(f"Rendered entity: \n {rendered}") + logger.debug(f"Rendered entity: \n{rendered}") writeYamlToFile(target_file_path, readYaml(escaping_quotation(rendered))) def generate_tenant_file(self): diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py index 381d5b1d5..6602663d6 100644 --- a/scripts/build_env/handle_sd.py +++ b/scripts/build_env/handle_sd.py @@ -2,7 +2,7 @@ import json import os from enum import Enum -from os import path +from os import path, getenv from pathlib import Path import envgenehelper as helper @@ -91,11 +91,11 @@ def prepare_vars_and_run_sd_handling(): env = Environment(base_dir, cluster, env_name) - sd_source_type = getenv_and_log('SD_SOURCE_TYPE') - sd_version = getenv_and_log('SD_VERSION') - sd_data = getenv_and_log('SD_DATA') - sd_delta = getenv_and_log('SD_DELTA') - sd_merge_mode = getenv_and_log("SD_REPO_MERGE_MODE") + sd_source_type = getenv('SD_SOURCE_TYPE') + sd_version = getenv('SD_VERSION') + sd_data = getenv('SD_DATA') + sd_delta = getenv('SD_DELTA') + sd_merge_mode = getenv("SD_REPO_MERGE_MODE") handle_sd(env, sd_source_type, sd_version, sd_data, sd_delta, sd_merge_mode) diff --git a/scripts/utils/__init__.py b/scripts/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/scripts/utils/log_pipe_params.py b/scripts/utils/log_pipe_params.py new file mode 100644 index 000000000..2460041fa --- /dev/null +++ b/scripts/utils/log_pipe_params.py @@ -0,0 +1,5 @@ +from pipeline_parameters import PipelineParametersHandler + +if __name__ == '__main__': + handler = PipelineParametersHandler() + handler.log_pipeline_params() \ No newline at end of file diff --git a/build_pipegene/scripts/pipeline_parameters.py b/scripts/utils/pipeline_parameters.py similarity index 56% rename from build_pipegene/scripts/pipeline_parameters.py rename to scripts/utils/pipeline_parameters.py index 7663169b0..443b71c91 100644 --- a/build_pipegene/scripts/pipeline_parameters.py +++ b/scripts/utils/pipeline_parameters.py @@ -1,8 +1,9 @@ +import json from os import getenv -from pprint import pformat - +from envgenehelper import logger from envgenehelper.plugin_engine import PluginEngine + def get_pipeline_parameters() -> dict: return { 'ENV_NAMES': getenv("ENV_NAMES", ""), @@ -14,22 +15,20 @@ def get_pipeline_parameters() -> dict: 
'IS_TEMPLATE_TEST': getenv("ENV_TEMPLATE_TEST") == "true", 'CI_COMMIT_REF_NAME': getenv("CI_COMMIT_REF_NAME", ""), 'JSON_SCHEMAS_DIR': getenv("JSON_SCHEMAS_DIR", "/module/schemas"), - 'ENV_INVENTORY_GENERATION_PARAMS': { - "SD_SOURCE_TYPE": getenv("SD_SOURCE_TYPE"), - "SD_VERSION": getenv("SD_VERSION"), - "SD_DATA": getenv("SD_DATA"), - "SD_DELTA": getenv("SD_DELTA"), - "SD_REPO_MERGE_MODE": getenv("SD_REPO_MERGE_MODE"), - "ENV_INVENTORY_INIT": getenv("ENV_INVENTORY_INIT"), - "ENV_SPECIFIC_PARAMETERS": getenv("ENV_SPECIFIC_PARAMS"), - "ENV_TEMPLATE_NAME": getenv("ENV_TEMPLATE_NAME"), - "ENV_TEMPLATE_VERSION": getenv("ENV_TEMPLATE_VERSION"), - }, + "SD_SOURCE_TYPE": getenv("SD_SOURCE_TYPE"), + "SD_VERSION": getenv("SD_VERSION"), + "SD_DATA": getenv("SD_DATA"), + "SD_DELTA": getenv("SD_DELTA"), + "SD_REPO_MERGE_MODE": getenv("SD_REPO_MERGE_MODE"), + "ENV_INVENTORY_INIT": getenv("ENV_INVENTORY_INIT"), + "ENV_SPECIFIC_PARAMETERS": getenv("ENV_SPECIFIC_PARAMS"), + "ENV_TEMPLATE_NAME": getenv("ENV_TEMPLATE_NAME"), 'CRED_ROTATION_PAYLOAD': getenv("CRED_ROTATION_PAYLOAD", ""), 'CRED_ROTATION_FORCE': getenv("CRED_ROTATION_FORCE", ""), 'GITLAB_RUNNER_TAG_NAME' : getenv("GITLAB_RUNNER_TAG_NAME", ""), 'RUNNER_SCRIPT_TIMEOUT' : getenv("RUNNER_SCRIPT_TIMEOUT") or "10m", - 'DEPLOYMENT_SESSION_ID': getenv("DEPLOYMENT_SESSION_ID", "") + 'DEPLOYMENT_SESSION_ID': getenv("DEPLOYMENT_SESSION_ID", ""), + 'LOG_LEVEL': getenv("LOG_LEVEL") } class PipelineParametersHandler: @@ -39,9 +38,21 @@ def __init__(self, **kwargs): pipe_param_plugin = PluginEngine(plugins_dir=plugins_dir) if pipe_param_plugin.modules: pipe_param_plugin.run(pipeline_params=self.params) + + def log_pipeline_params(self): + params_str = "Input parameters are: " + + params = self.params.copy() + if params.get("CRED_ROTATION_PAYLOAD"): + params["CRED_ROTATION_PAYLOAD"] = "***" + + for k, v in params.items(): + try: + parsed = json.loads(v) + params[k] = json.dumps(parsed, separators=(",", ":")) + except (TypeError, ValueError): + pass + + params_str += f"\n{k.upper()}: {params[k]}" - def get_params_str(self) -> str: - result = '' - for k, v in self.params.items(): - result += f"\n{k.upper()}: {pformat(v)}" - return result + logger.info(params_str) \ No newline at end of file From 3e37f4d866b5bd25bffe6336770f60af8606ef5f Mon Sep 17 00:00:00 2001 From: "qubership-actions[bot]" Date: Thu, 18 Dec 2025 10:09:40 +0000 Subject: [PATCH 11/48] chore: Update docker image tags and envgene_version for branch main [skip ci] --- .../instance-repo-pipeline/.github/workflows/Envgene.yml | 6 +++--- .../git-system-follower-package/package.yaml | 2 +- .../git-system-follower-package/package.yaml | 2 +- .../scripts/templates/default/cookiecutter.json | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml b/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml index 37723e3cb..d1411d50a 100644 --- a/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml +++ b/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml @@ -69,9 +69,9 @@ env: DOCKER_IMAGE_NAME_EFFECTIVE_SET_GENERATOR: "${{ vars.DOCKER_REGISTRY || 'ghcr.io/netcracker' }}/qubership-effective-set-generator" #DOCKER_IMAGE_TAGS - DOCKER_IMAGE_TAG_PIPEGENE: "1.12.1" - DOCKER_IMAGE_TAG_ENVGENE: "1.12.1" - DOCKER_IMAGE_TAG_EFFECTIVE_SET_GENERATOR: "1.12.1" + DOCKER_IMAGE_TAG_PIPEGENE: "1.13.0" + DOCKER_IMAGE_TAG_ENVGENE: "1.13.0" + DOCKER_IMAGE_TAG_EFFECTIVE_SET_GENERATOR: "1.13.0" 
jobs: process_environment_variables: diff --git a/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml b/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml index 448d31d1e..c9fc67bc4 100644 --- a/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml +++ b/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml @@ -1,5 +1,5 @@ apiVersion: v1 type: gitlab-ci-pipeline name: envgene_discovery_project -version: 1.12.1 +version: 1.13.0 dependencies: [] diff --git a/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml b/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml index 2524e20b1..d888be0c7 100644 --- a/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml +++ b/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml @@ -1,5 +1,5 @@ apiVersion: v1 type: gitlab-ci-pipeline name: envgene_instance_project -version: 1.12.1 +version: 1.13.0 dependencies: [] diff --git a/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json b/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json index b7753f3fa..cb2a3eb65 100644 --- a/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json +++ b/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json @@ -2,7 +2,7 @@ "gsf_repository_name": "envgene_instance_project", "docker_registry": "ghcr.io", "docker_namespace": "netcracker", - "envgene_version": "1.12.1", + "envgene_version": "1.13.0", "envgen_image": "qubership-envgene", "pipe_image": "qubership-pipegene", "cloud_deploytool_image": "env-generator-deploytool_build_deploytool", From 320c9173ac20195cbdafd6b2a0e783383819da51 Mon Sep 17 00:00:00 2001 From: popoveugene <42543333+popoveugene@users.noreply.github.com> Date: Fri, 19 Dec 2025 12:49:20 +0300 Subject: [PATCH 12/48] docs: update collision processing (#888) --- docs/features/calculator-cli.md | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/features/calculator-cli.md b/docs/features/calculator-cli.md index 69dfd02d5..c042be577 100644 --- a/docs/features/calculator-cli.md +++ b/docs/features/calculator-cli.md @@ -571,7 +571,7 @@ The `` can be complex, such as a map or a list, whose elements can also b > [!IMPORTANT] > Parameters whose keys match the name of one of the services must be excluded from this file -> and placed in [`collision-deployment-parameters.yaml`](#version-20deployment-parameter-context-collision-parameters) instead ###### [Version 2.0] Image parameters derived from `deploy_param` @@ -628,11 +628,14 @@ global: &id001 ##### \[Version 2.0][Deployment Parameter Context] Collision Parameters -Parameters whose key matches the name of one of the [services](#version-20-service-inclusion-criteria-and-naming-convention) are placed in the following files: +Parameters at the **root level** of `deployment-parameters.yaml` or `credentials.yaml` whose keys match the name of one of the [services](#version-20-service-inclusion-criteria-and-naming-convention) are placed in the following files: - `collision-deployment-parameters.yaml`: if the parameter is non-sensitive (i.e., not defined via a credential macro). - `collision-credentials.yaml`: if the parameter is sensitive (i.e., defined via a credential macro). +> [!NOTE] +> Only root-level parameters are processed by this collision logic. If a parameter with a service name as its key is nested under a service section, it is not moved to the collision files and remains in its original location. +
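To make the relocation rule in this docs patch concrete, here is a small illustrative sketch. The service name `billing-service` and all values are hypothetical; the exact collision-file layout is defined by the documentation itself, not by this example:

```yaml
# deployment-parameters.yaml (input, hypothetical)
GLOBAL_TIMEOUT: 300   # ordinary root-level parameter: stays in place
billing-service:      # root-level key that collides with a service name...
  REPLICAS: 2         # ...so the whole entry is relocated

# collision-deployment-parameters.yaml (result: the colliding, non-sensitive entry)
billing-service:
  REPLICAS: 2
```

Had the colliding parameter been defined via a credential macro, it would land in `collision-credentials.yaml` instead, per the rule above.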
The structure of both files is as follows: ```yaml From a519f4abd7315b7be03d24f331899c905ffb092b Mon Sep 17 00:00:00 2001 From: Dias <120464230+dysmon@users.noreply.github.com> Date: Fri, 19 Dec 2025 16:19:49 +0500 Subject: [PATCH 13/48] fix: change log level name (#887) --- .../devops/cli/logger/LogLevelMapper.java | 24 ------------------- .../devops/cli/logger/LoggingInitializer.java | 19 --------------- .../src/main/resources/application.properties | 2 +- python/envgene/envgenehelper/logger.py | 2 +- scripts/utils/pipeline_parameters.py | 2 +- 5 files changed, 3 insertions(+), 46 deletions(-) delete mode 100644 build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java delete mode 100644 build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java deleted file mode 100644 index 91db8feef..000000000 --- a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LogLevelMapper.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.qubership.cloud.devops.cli.logger; - -import jakarta.enterprise.context.ApplicationScoped; -import org.eclipse.microprofile.config.inject.ConfigProperty; -import org.jboss.logging.Logger.Level; - -@ApplicationScoped -public class LogLevelMapper { - - @ConfigProperty(name = "LOG_LEVEL", defaultValue = "INFO") - String level; - - public Level getMappedLevel() { - return switch (level.toUpperCase()) { - case "CRITICAL" -> Level.FATAL; - case "ERROR" -> Level.ERROR; - case "WARNING" -> Level.WARN; - case "INFO" -> Level.INFO; - case "DEBUG" -> Level.DEBUG; - case "TRACE" -> Level.TRACE; - default -> Level.INFO; - }; - } -} diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java b/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java deleted file mode 100644 index cd4d4c6e0..000000000 --- a/build_effective_set_generator_java/effective-set-generator/src/main/java/org/qubership/cloud/devops/cli/logger/LoggingInitializer.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.qubership.cloud.devops.cli.logger; - -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; -import jakarta.annotation.PostConstruct; -import org.jboss.logging.Logger.Level; - -@ApplicationScoped -public class LoggingInitializer { - - @Inject - LogLevelMapper logLevelMapper; - - @PostConstruct - public void init() { - Level logLevel = logLevelMapper.getMappedLevel(); - System.setProperty("quarkus.log.level", logLevel.name()); - } -} diff --git a/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties
b/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties index 93942c580..8953e8865 100644 --- a/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties +++ b/build_effective_set_generator_java/effective-set-generator/src/main/resources/application.properties @@ -1,6 +1,6 @@ quarkus.package.jar.type=uber-jar -quarkus.log.level=${LOG_LEVEL:INFO} +quarkus.log.level=${ENVGENE_LOG_LEVEL:INFO} quarkus.console.color=false quarkus.log.console.format=%s%n diff --git a/python/envgene/envgenehelper/logger.py b/python/envgene/envgenehelper/logger.py index 2856ad24b..846e8912b 100644 --- a/python/envgene/envgenehelper/logger.py +++ b/python/envgene/envgenehelper/logger.py @@ -28,7 +28,7 @@ def format(self, record): logger = logging.getLogger("envgene") logger.propagate = False -log_level_str = getenv("LOG_LEVEL", "INFO").upper() +log_level_str = getenv("ENVGENE_LOG_LEVEL", "INFO").upper() log_level = getattr(logging, log_level_str, logging.INFO) logger.setLevel(log_level) diff --git a/scripts/utils/pipeline_parameters.py b/scripts/utils/pipeline_parameters.py index 443b71c91..e12382917 100644 --- a/scripts/utils/pipeline_parameters.py +++ b/scripts/utils/pipeline_parameters.py @@ -28,7 +28,7 @@ def get_pipeline_parameters() -> dict: 'GITLAB_RUNNER_TAG_NAME' : getenv("GITLAB_RUNNER_TAG_NAME", ""), 'RUNNER_SCRIPT_TIMEOUT' : getenv("RUNNER_SCRIPT_TIMEOUT") or "10m", 'DEPLOYMENT_SESSION_ID': getenv("DEPLOYMENT_SESSION_ID", ""), - 'LOG_LEVEL': getenv("LOG_LEVEL") + 'ENVGENE_LOG_LEVEL': getenv("ENVGENE_LOG_LEVEL") } class PipelineParametersHandler: From 433b3fa8a75fd8407d2865b71c40b12b10384027 Mon Sep 17 00:00:00 2001 From: "qubership-actions[bot]" Date: Fri, 19 Dec 2025 11:29:15 +0000 Subject: [PATCH 14/48] chore: Update docker image tags and envgene_version for branch main [skip ci] --- .../instance-repo-pipeline/.github/workflows/Envgene.yml | 6 +++--- .../git-system-follower-package/package.yaml | 2 +- .../git-system-follower-package/package.yaml | 2 +- .../scripts/templates/default/cookiecutter.json | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml b/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml index d1411d50a..48a754c55 100644 --- a/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml +++ b/github_workflows/instance-repo-pipeline/.github/workflows/Envgene.yml @@ -69,9 +69,9 @@ env: DOCKER_IMAGE_NAME_EFFECTIVE_SET_GENERATOR: "${{ vars.DOCKER_REGISTRY || 'ghcr.io/netcracker' }}/qubership-effective-set-generator" #DOCKER_IMAGE_TAGS - DOCKER_IMAGE_TAG_PIPEGENE: "1.13.0" - DOCKER_IMAGE_TAG_ENVGENE: "1.13.0" - DOCKER_IMAGE_TAG_EFFECTIVE_SET_GENERATOR: "1.13.0" + DOCKER_IMAGE_TAG_PIPEGENE: "1.13.1" + DOCKER_IMAGE_TAG_ENVGENE: "1.13.1" + DOCKER_IMAGE_TAG_EFFECTIVE_SET_GENERATOR: "1.13.1" jobs: process_environment_variables: diff --git a/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml b/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml index c9fc67bc4..440dd6dbb 100644 --- a/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml +++ b/gsf_packages/envgene_discovery_project/git-system-follower-package/package.yaml @@ -1,5 +1,5 @@ apiVersion: v1 type: gitlab-ci-pipeline name: envgene_discovery_project -version: 1.13.0 +version: 1.13.1 dependencies: [] diff --git 
a/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml b/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml index d888be0c7..7b06b6806 100644 --- a/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml +++ b/gsf_packages/envgene_instance_project/git-system-follower-package/package.yaml @@ -1,5 +1,5 @@ apiVersion: v1 type: gitlab-ci-pipeline name: envgene_instance_project -version: 1.13.0 +version: 1.13.1 dependencies: [] diff --git a/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json b/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json index cb2a3eb65..5d20ca0a1 100644 --- a/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json +++ b/gsf_packages/envgene_instance_project/git-system-follower-package/scripts/templates/default/cookiecutter.json @@ -2,7 +2,7 @@ "gsf_repository_name": "envgene_instance_project", "docker_registry": "ghcr.io", "docker_namespace": "netcracker", - "envgene_version": "1.13.0", + "envgene_version": "1.13.1", "envgen_image": "qubership-envgene", "pipe_image": "qubership-pipegene", "cloud_deploytool_image": "env-generator-deploytool_build_deploytool", From f8431c1047f8be8dd8f2b56470203ddf7ce6bdc2 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 3 Dec 2025 01:00:03 +0530 Subject: [PATCH 15/48] feat: Add RegDef V2 validation with version detection Detect version field in RegDef files and validate against V2 schema when version is 2.0. Use logger for validation messages and move schema paths to constants. --- scripts/build_env/main.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/scripts/build_env/main.py b/scripts/build_env/main.py index 4d2ebaea5..681fad194 100644 --- a/scripts/build_env/main.py +++ b/scripts/build_env/main.py @@ -1,4 +1,5 @@ import argparse +import os from envgenehelper import * from envgenehelper.deployer import * @@ -19,6 +20,9 @@ CLOUD_SCHEMA = "schemas/cloud.schema.json" NAMESPACE_SCHEMA = "schemas/namespace.schema.json" ENV_SPECIFIC_RESOURCE_PROFILE_SCHEMA = "schemas/resource-profile.schema.json" +APPDEF_SCHEMA = "schemas/appdef.schema.json" +REGDEF_V1_SCHEMA = "schemas/regdef.schema.json" +REGDEF_V2_SCHEMA = "schemas/regdef-v2.schema.json" def prepare_folders_for_rendering(env_name, cluster_name, source_env_dir, templates_dir, render_dir, @@ -280,15 +284,18 @@ def validate_appregdefs(render_dir, env_name): logger.info(f"No AppDef YAMLs found in {appdef_dir}") for file in appdef_files: logger.info(f"AppDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/appdef.schema.json") + validate_yaml_by_scheme_or_fail(file, APPDEF_SCHEMA) if os.path.exists(regdef_dir): regdef_files = findAllYamlsInDir(regdef_dir) if not regdef_files: logger.info(f"No RegDef YAMLs found in {regdef_dir}") for file in regdef_files: - logger.info(f"RegDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/regdef.schema.json") + logger.info(f"Validating RegDef file: {file}") + regdef_content = openYaml(file) + version = str(regdef_content.get('version', '1.0')) + schema_path = REGDEF_V2_SCHEMA if version != '1.0' else REGDEF_V1_SCHEMA + validate_yaml_by_scheme_or_fail(file, schema_path) def render_environment(env_name, cluster_name, templates_dir, all_instances_dir, output_dir, g_template_version,
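To illustrate the detection logic in the patch above: the validator reads the optional `version` field from each RegDef YAML (defaulting to `'1.0'` when absent) and picks the schema accordingly. A minimal sketch; any field other than `version` is hypothetical, and the real contracts live in `schemas/regdef.schema.json` and `schemas/regdef-v2.schema.json`:

```yaml
# RegDef with no version field: treated as 1.0 and
# validated against schemas/regdef.schema.json
name: legacy-registry
---
# RegDef declaring version 2.0: validated against schemas/regdef-v2.schema.json
version: 2.0
name: cloud-registry
```

Note that any value other than `'1.0'` selects the V2 schema, not only `'2.0'`, per `schema_path = REGDEF_V2_SCHEMA if version != '1.0' else REGDEF_V1_SCHEMA`.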
From 2f541bdeb33bd4b073eaea7cdfed8146f8061a52 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 30 Dec 2025 13:36:38 +0530 Subject: [PATCH 16/48] fix: gaps issue in registry v2 Artifact searcher implementation --- .../artifact_searcher/artifact.py | 46 +++++++--- .../artifact_searcher/cloud_auth_helper.py | 89 +++++++++++++++---- scripts/build_env/handle_sd.py | 82 ++++++++--------- 3 files changed, 149 insertions(+), 68 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index d8aee60ba..c5b538dde 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -270,33 +270,46 @@ async def check_artifact_async( async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]: if not getattr(app.registry.maven_config, 'auth_config', None): + logger.error(f"V2 fallback for '{app.name}': Registry '{app.registry.name}' version 2.0 missing maven_config.authConfig") return await _check_artifact_v1_async(app, artifact_extension, version) try: from artifact_searcher.cloud_auth_helper import CloudAuthHelper from qubership_pipelines_common_library.v1.maven_client import Artifact as MavenArtifact - except ImportError: + except ImportError as e: + logger.error(f"V2 fallback for '{app.name}': Missing required libraries - {e}") return await _check_artifact_v1_async(app, artifact_extension, version) auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") - if not auth_config or auth_config.provider not in ["aws", "gcp", "artifactory", "nexus"]: + if not auth_config: + logger.error(f"V2 fallback for '{app.name}': Could not resolve authConfig for registry '{app.registry.name}'") return await _check_artifact_v1_async(app, artifact_extension, version) + + # Note: provider is required in RegDef v2 and validated in cloud_auth_helper # AWS and GCP require credentials; Artifactory/Nexus can work with anonymous access - if auth_config.provider in ["aws", "gcp"] and not env_creds: - logger.warning(f"V2 {auth_config.provider} requires credentials but env_creds is empty") - return await _check_artifact_v1_async(app, artifact_extension, version) - if auth_config.provider in ["aws", "gcp"] and auth_config.credentials_id and auth_config.credentials_id not in (env_creds or {}): - logger.warning(f"V2 {auth_config.provider} credentials '{auth_config.credentials_id}' not found in env_creds") - return await _check_artifact_v1_async(app, artifact_extension, version) + if auth_config.provider in ["aws", "gcp"]: + if not env_creds: + logger.error(f"V2 fallback for '{app.name}': {auth_config.provider} requires credentials but env_creds is empty") + return await _check_artifact_v1_async(app, artifact_extension, version) + if auth_config.credentials_id and auth_config.credentials_id not in env_creds: + logger.error(f"V2 fallback for '{app.name}': {auth_config.provider} credential '{auth_config.credentials_id}' not found in env_creds") + logger.error(f"Available credentials: {list(env_creds.keys())}") + return await _check_artifact_v1_async(app, artifact_extension, version) logger.info(f"V2 search for {app.name} with provider={auth_config.provider}") loop = asyncio.get_running_loop() try: searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) + except KeyError as e: + logger.error(f"V2 fallback for '{app.name}': Credential not found - {e}") + return await
_check_artifact_v1_async(app, artifact_extension, version) + except ValueError as e: + logger.error(f"V2 fallback for '{app.name}': Invalid configuration - {e}") + return await _check_artifact_v1_async(app, artifact_extension, version) except Exception as e: - logger.warning(f"Failed to create V2 searcher for {app.name}: {e}") + logger.error(f"V2 fallback for '{app.name}': Failed to create searcher - {e}", exc_info=True) return await _check_artifact_v1_async(app, artifact_extension, version) artifact_string = f"{app.group_id}:{app.artifact_id}:{version}" @@ -349,13 +362,24 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt except Exception as e: last_error = e error_str = str(e).lower() + + # Log registry response if available (for HTTP errors) + if hasattr(e, 'response'): + try: + response_text = e.response.text[:500] if hasattr(e.response, 'text') else str(e.response)[:500] + logger.error(f"Registry response for {app.name}: HTTP {getattr(e.response, 'status_code', 'N/A')}") + logger.error(f"Response body (first 500 chars): {response_text}") + except Exception: + pass + if attempt < max_retries - 1 and any(x in error_str for x in ["401", "unauthorized", "forbidden", "expired", "timeout"]): logger.warning(f"V2 error for {app.name}: {e}, retrying...") continue - logger.warning(f"V2 failed after {max_retries} attempts for {app.name}: {e}") + + logger.error(f"V2 fallback for '{app.name}': Failed after {attempt + 1} attempt(s) - {e}") return await _check_artifact_v1_async(app, artifact_extension, version) else: - logger.warning(f"V2 failed after {max_retries} attempts: {last_error}") + logger.error(f"V2 fallback for '{app.name}': All {max_retries} attempts exhausted - {last_error}") return await _check_artifact_v1_async(app, artifact_extension, version) if auth_config.provider == "aws": diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index 5af4f331c..01e808e46 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -49,18 +49,47 @@ def resolve_auth_config(registry: Registry, artifact_type: str = "maven") -> Opt return auth_config @staticmethod - def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, dict]]) -> dict: - """Resolve credentials from env_creds based on auth_config.credentials_id.""" + def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, dict]]) -> Optional[dict]: + """Resolve credentials from env_creds based on auth_config.credentials_id. 
+ + Returns: + dict: Credential data if found and non-anonymous + None: For anonymous access (no credentialsId or empty username/password) + """ cred_id = auth_config.credentials_id if not cred_id: - return {} # Anonymous access (Artifactory/Nexus with empty credentialsId) + logger.info("No credentialsId specified, using anonymous access") + return None if not env_creds or cred_id not in env_creds: raise KeyError(f"Credential '{cred_id}' not found in env_creds") cred_entry = env_creds[cred_id] - creds = cred_entry.get("data", cred_entry) if isinstance(cred_entry, dict) else cred_entry - logger.info(f"Resolved credentials for '{cred_id}'") + + # Extract credential data from the new structure: {"type": "...", "data": {...}} + cred_type = cred_entry.get("type") if isinstance(cred_entry, dict) else None + cred_data = cred_entry.get("data", cred_entry) if isinstance(cred_entry, dict) else cred_entry + + # Check for anonymous access (empty username/password for usernamePassword type) + if cred_type == "usernamePassword": + username = cred_data.get("username", "") + password = cred_data.get("password", "") + if not username and not password: + logger.info(f"Credential '{cred_id}' is anonymous (empty username/password)") + return None + creds = {"username": username, "password": password} + elif cred_type == "secret": + # For GCP service account JSON or other secret-based credentials + if "secret" in cred_data: + creds = cred_data + else: + # Handle case where data itself is the secret + creds = {"secret": cred_data} + else: + # Fallback for unknown credential types + creds = cred_data + + logger.info(f"Resolved credentials for '{cred_id}' (type: {cred_type})") # Validate required fields per provider if auth_config.provider == "aws": @@ -88,6 +117,7 @@ def _extract_repository_name(url: str) -> str: return parts[4] return url.split("/")[-1] + @staticmethod def _extract_region(url: str, auth_config: AuthConfig) -> str: """Extract region from URL or auth_config. Prefers explicit config over URL extraction.""" @@ -104,25 +134,42 @@ def _extract_region(url: str, auth_config: AuthConfig) -> str: @staticmethod def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict]]) -> 'MavenArtifactSearcher': - """Create configured MavenArtifactSearcher for the registry provider.""" + """Create configured MavenArtifactSearcher for the registry provider. + + Provider auto-detection: If auth_config.provider is not specified, it will be + auto-detected from the registry URL. 
+ """ if MavenArtifactSearcher is None: raise ImportError("qubership_pipelines_common_library not available") auth_config = CloudAuthHelper.resolve_auth_config(registry, "maven") - if not auth_config or not auth_config.provider: + if not auth_config: raise ValueError("Could not resolve authConfig for maven artifacts") - if auth_config.provider not in ["aws", "gcp", "artifactory", "nexus"]: - raise ValueError(f"Unsupported provider: {auth_config.provider}") - - creds = CloudAuthHelper.resolve_credentials(auth_config, env_creds) + registry_url = registry.maven_config.repository_domain_name + + # Provider is required in RegDef v2 + provider = auth_config.provider + if not provider: + logger.error(f"V2 fallback: provider field is required in authConfig for registry '{registry.name}'") + raise ValueError(f"Provider field is required in authConfig for registry '{registry.name}'") + + if provider not in ["aws", "gcp", "artifactory", "nexus"]: + raise ValueError(f"Unsupported provider: {provider}") + + # Resolve credentials (returns None for anonymous access) + creds = CloudAuthHelper.resolve_credentials(auth_config, env_creds) searcher = MavenArtifactSearcher(registry_url, params={"timeout": DEFAULT_SEARCHER_TIMEOUT}) - if auth_config.provider == "aws": + # AWS and GCP require credentials - cannot work anonymously + if provider in ["aws", "gcp"] and creds is None: + raise ValueError(f"{provider.upper()} requires credentials - anonymous access not supported") + + if provider == "aws": return CloudAuthHelper._configure_aws(searcher, auth_config, creds, registry_url) - elif auth_config.provider == "gcp": + elif provider == "gcp": return CloudAuthHelper._configure_gcp(searcher, auth_config, creds, registry_url) - elif auth_config.provider == "artifactory": + elif provider == "artifactory": return CloudAuthHelper._configure_artifactory(searcher, creds) else: # nexus return CloudAuthHelper._configure_nexus(searcher, creds) @@ -165,14 +212,24 @@ def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, ) @staticmethod - def _configure_artifactory(searcher: 'MavenArtifactSearcher', creds: dict) -> 'MavenArtifactSearcher': + def _configure_artifactory(searcher: 'MavenArtifactSearcher', creds: Optional[dict]) -> 'MavenArtifactSearcher': + """Configure Artifactory authentication. Supports anonymous access if creds is None.""" + if creds is None: + logger.info("Configuring Artifactory with anonymous access (no credentials)") + return searcher + return searcher.with_artifactory( username=creds.get("username", ""), password=creds.get("password", "") ) @staticmethod - def _configure_nexus(searcher: 'MavenArtifactSearcher', creds: dict) -> 'MavenArtifactSearcher': + def _configure_nexus(searcher: 'MavenArtifactSearcher', creds: Optional[dict]) -> 'MavenArtifactSearcher': + """Configure Nexus authentication. 
Supports anonymous access if creds is None.""" + if creds is None: + logger.info("Configuring Nexus with anonymous access (no credentials)") + return searcher + return searcher.with_nexus( username=creds.get("username", ""), password=creds.get("password", "") diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py index 4e632f8b8..deee30a27 100644 --- a/scripts/build_env/handle_sd.py +++ b/scripts/build_env/handle_sd.py @@ -9,7 +9,7 @@ import yaml from artifact_searcher import artifact from artifact_searcher.utils import models as artifact_models -from envgenehelper.business_helper import getenv_and_log, getenv_with_error +from envgenehelper.business_helper import getenv_and_log, getenv_with_error, getCentralCredentialsPath from envgenehelper.env_helper import Environment from envgenehelper.file_helper import identify_yaml_extension from envgenehelper.logger import logger @@ -301,54 +301,54 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod def _get_environment_credentials(env: Environment = None) -> dict: - """Get credentials from environment for V2 cloud registry support.""" - env_creds = {} + """Load ALL credentials from credentials.yml for V2 cloud registry support. - # First try environment variables - aws_access_key = os.getenv("AWS_ACCESS_KEY_ID") - aws_secret_key = os.getenv("AWS_SECRET_ACCESS_KEY") - if aws_access_key and aws_secret_key: - env_creds["aws-keys"] = { - "username": aws_access_key, - "password": aws_secret_key - } - logger.debug("Loaded AWS credentials from environment") + For V2 registries, loads from /configuration/credentials/credentials.yml + For backward compatibility (V1), also loads from environment-specific credentials if available. - # If not in environment and env object is provided, try to get from credentials file - if not env_creds and env and hasattr(env, 'creds') and env.creds: - # Handle AWS credentials - if 'aws-keys' in env.creds: - # env.creds['aws-keys'] has structure: {'type': 'usernamePassword', 'data': {'username': '...', 'password': '...'}} - aws_creds = env.creds['aws-keys']['data'] - env_creds["aws-keys"] = { - "username": aws_creds['username'], - "password": aws_creds['password'] - } - logger.debug("Loaded AWS credentials from credentials.yml") - - # Handle GCP credentials - if 'gcp-keys' in env.creds: - gcp_creds = env.creds['gcp-keys']['data']['secret'] - env_creds["gcp-keys"] = {"secret": gcp_creds} - logger.debug("Loaded GCP credentials from credentials.yml") + Returns a dict mapping credential IDs to their full credential config: + { + "credential-id": { + "type": "usernamePassword" | "secret", + "data": {...} + } + } + """ + env_creds = {} - gcp_sa_json_path = os.getenv("GCP_SA_JSON_PATH") - if gcp_sa_json_path and path.exists(gcp_sa_json_path): + # V2: Load from central credentials location (/configuration/credentials/credentials.yml) + central_creds_path = getCentralCredentialsPath(WORK_DIR) + if os.path.exists(central_creds_path): try: - with open(gcp_sa_json_path) as f: - env_creds["gcp-sa"] = {"secret": f.read()} - logger.debug("Loaded GCP service account from file") + with open(central_creds_path, 'r') as f: + central_creds = yaml.safe_load(f) or {} + for cred_id, cred_config in central_creds.items(): + if cred_id == "sops": # Skip SOPS metadata + continue + if not isinstance(cred_config, dict): + logger.warning(f"Skipping invalid credential entry '{cred_id}' in central credentials: not a dict") + continue + env_creds[cred_id] = cred_config + logger.debug(f"Loaded credential 
'{cred_id}' (type: {cred_config.get('type')}) from central credentials") except Exception as e: - logger.warning(f"Failed to load GCP credentials from {gcp_sa_json_path}: {e}") - gcp_sa_json = os.getenv("GCP_SA_JSON") - if gcp_sa_json: - env_creds["gcp-sa"] = {"secret": gcp_sa_json} - logger.debug("Loaded GCP service account from environment variable") + logger.warning(f"Failed to load central credentials from {central_creds_path}: {e}") + + # V1: Backward compatibility - also load from environment-specific credentials if they exist + if env and hasattr(env, 'creds') and env.creds: + for cred_id, cred_config in env.creds.items(): + if cred_id in env_creds: + logger.debug(f"Credential '{cred_id}' already loaded from central location, skipping environment-specific") + continue + if not isinstance(cred_config, dict): + logger.warning(f"Skipping invalid credential entry '{cred_id}' in environment: not a dict") + continue + env_creds[cred_id] = cred_config + logger.debug(f"Loaded credential '{cred_id}' (type: {cred_config.get('type')}) from environment") if env_creds: - logger.info(f"Loaded {len(env_creds)} credential set(s) for V2 cloud registry support") + logger.info(f"Loaded {len(env_creds)} credential(s) from credentials.yml for V2 registry support") else: - logger.debug("No V2 cloud credentials found in environment (V1 will still work)") + logger.debug("No credentials found in credentials.yml (V1 will still work if no auth required)") return env_creds From 8d7772b608f0273269ec9fa2594d406dd52df6b3 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 30 Dec 2025 14:11:37 +0530 Subject: [PATCH 17/48] fix: gaps issue in registry v2 Artifact searcher implementation 2 --- .../artifact_searcher/artifact.py | 24 +++++++++++-- scripts/build_env/handle_sd.py | 35 ++++--------------- 2 files changed, 28 insertions(+), 31 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index c5b538dde..757c61a61 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -101,7 +101,11 @@ async def resolve_snapshot_version_async( def version_to_folder_name(version: str) -> str: - """Normalize timestamped snapshot version to -SNAPSHOT folder name.""" + """ + Normalizes version string for folder naming. + If version is timestamped snapshot (e.g. '1.0.0-20240702.123456-1'), it replaces the timestamp suffix with + '-SNAPSHOT'.
Otherwise, returns the version unchanged + """ snapshot_pattern = re.compile(r"-\d{8}\.\d{6}-\d+$") return snapshot_pattern.sub("-SNAPSHOT", version) if snapshot_pattern.search(version) else version @@ -146,6 +150,12 @@ def create_app_artifacts_local_path(app_name, app_version): async def download(session, artifact_info: ArtifactInfo) -> ArtifactInfo: + """ + Downloads an artifact to a local directory: ///filename.extension + Sets full local path of artifact to artifact info + Returns: + ArtifactInfo: Object containing related information about the artifact + """ if artifact_info.local_path: logger.info(f"Artifact already downloaded: {artifact_info.local_path}") return artifact_info @@ -190,6 +200,7 @@ async def check_artifact_by_full_url_async( def get_repo_value_pointer_dict(registry: Registry): + """Permanent set of repositories for searching of artifacts""" maven = registry.maven_config repos = { maven.target_snapshot: "targetSnapshot", @@ -208,6 +219,7 @@ def get_repo_pointer(repo_value: str, registry: Registry): async def _attempt_check( app: Application, version: str, artifact_extension: FileExtension, registry_url: str | None = None ) -> Optional[tuple[str, tuple[str, str]]]: + """Helper function to attempt artifact check with a given registry URL""" folder = version_to_folder_name(version) check_artifact_stop_event = asyncio.Event() resolve_snapshot_stop_event = asyncio.Event() @@ -253,8 +265,16 @@ async def check_artifact_async( app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict] = None ) -> Optional[tuple[str, tuple[str, str]]]: + """ + Resolves the full artifact URL and the first repository where it was found. + Supports both release and snapshot versions. + Returns: + Optional[tuple[str, tuple[str, str]]]: A tuple containing: + - str: Full URL to the artifact. + - tuple[str, str]: A pair of (repository name, repository pointer/alias in CMDB). + Returns None if the artifact could not be resolved + """ registry_version = getattr(app.registry, 'version', "1.0") - if registry_version == "2.0": logger.info(f"Detected RegDef V2 for {app.name}, attempting cloud-aware search") try: diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py index deee30a27..389b4125d 100644 --- a/scripts/build_env/handle_sd.py +++ b/scripts/build_env/handle_sd.py @@ -9,7 +9,7 @@ import yaml from artifact_searcher import artifact from artifact_searcher.utils import models as artifact_models -from envgenehelper.business_helper import getenv_and_log, getenv_with_error, getCentralCredentialsPath +from envgenehelper.business_helper import getenv_and_log, getenv_with_error from envgenehelper.env_helper import Environment from envgenehelper.file_helper import identify_yaml_extension from envgenehelper.logger import logger @@ -301,54 +301,31 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod def _get_environment_credentials(env: Environment = None) -> dict: - """Load ALL credentials from credentials.yml for V2 cloud registry support. - - For V2 registries, loads from /configuration/credentials/credentials.yml - For backward compatibility (V1), also loads from environment-specific credentials if available. 
- - Returns a dict mapping credential IDs to their full credential config: - { - "credential-id": { - "type": "usernamePassword" | "secret", - "data": {...} - } - } - """ env_creds = {} - # V2: Load from central credentials location (/configuration/credentials/credentials.yml) - central_creds_path = getCentralCredentialsPath(WORK_DIR) + central_creds_path = f"{WORK_DIR}/configuration/credentials/credentials.yml" if os.path.exists(central_creds_path): try: with open(central_creds_path, 'r') as f: central_creds = yaml.safe_load(f) or {} for cred_id, cred_config in central_creds.items(): - if cred_id == "sops": # Skip SOPS metadata + if cred_id == "sops": continue if not isinstance(cred_config, dict): - logger.warning(f"Skipping invalid credential entry '{cred_id}' in central credentials: not a dict") + logger.warning(f"Skipping invalid credential entry '{cred_id}': not a dict") continue env_creds[cred_id] = cred_config - logger.debug(f"Loaded credential '{cred_id}' (type: {cred_config.get('type')}) from central credentials") except Exception as e: - logger.warning(f"Failed to load central credentials from {central_creds_path}: {e}") + logger.warning(f"Failed to load central credentials: {e}") - # V1: Backward compatibility - also load from environment-specific credentials if they exist if env and hasattr(env, 'creds') and env.creds: for cred_id, cred_config in env.creds.items(): if cred_id in env_creds: - logger.debug(f"Credential '{cred_id}' already loaded from central location, skipping environment-specific") continue if not isinstance(cred_config, dict): - logger.warning(f"Skipping invalid credential entry '{cred_id}' in environment: not a dict") + logger.warning(f"Skipping invalid credential entry '{cred_id}': not a dict") continue env_creds[cred_id] = cred_config - logger.debug(f"Loaded credential '{cred_id}' (type: {cred_config.get('type')}) from environment") - - if env_creds: - logger.info(f"Loaded {len(env_creds)} credential(s) from credentials.yml for V2 registry support") - else: - logger.debug("No credentials found in credentials.yml (V1 will still work if no auth required)") return env_creds From 40324d595cf7084fc6fa48b4de1576e03f8f8b7e Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 30 Dec 2025 17:38:49 +0530 Subject: [PATCH 18/48] fix: gaps issue in registry v2 Artifact searcher implmentation and fixed issue from pipeline run --- .../artifact_searcher/cloud_auth_helper.py | 2 +- scripts/build_env/handle_sd.py | 50 ++++++++++++++----- 2 files changed, 39 insertions(+), 13 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index 01e808e46..d857a08d0 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -2,7 +2,7 @@ import re from typing import Dict, Optional -from loguru import logger +from envgenehelper import logger from artifact_searcher.utils.models import AuthConfig, Registry diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py index 389b4125d..ad9bb1273 100644 --- a/scripts/build_env/handle_sd.py +++ b/scripts/build_env/handle_sd.py @@ -10,6 +10,7 @@ from artifact_searcher import artifact from artifact_searcher.utils import models as artifact_models from envgenehelper.business_helper import getenv_and_log, getenv_with_error +from envgenehelper.crypt import decrypt_file from envgenehelper.env_helper import Environment from 
diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index 01e808e46..d857a08d0 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -2,7 +2,7 @@ import re from typing import Dict, Optional -from loguru import logger +from envgenehelper import logger from artifact_searcher.utils.models import AuthConfig, Registry diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py index 389b4125d..ad9bb1273 100644 --- a/scripts/build_env/handle_sd.py +++ b/scripts/build_env/handle_sd.py @@ -10,6 +10,7 @@ from artifact_searcher import artifact from artifact_searcher.utils import models as artifact_models from envgenehelper.business_helper import getenv_and_log, getenv_with_error +from envgenehelper.crypt import decrypt_file from envgenehelper.env_helper import Environment from envgenehelper.file_helper import identify_yaml_extension from envgenehelper.logger import logger @@ -301,31 +302,56 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod def _get_environment_credentials(env: Environment = None) -> dict: + """ + Load credentials from central credentials file and environment-specific credentials. + Supports SOPS, Fernet, and plaintext credential formats. + + Args: + env: Optional Environment object containing environment-specific credentials + + Returns: + Dictionary of credential ID to credential configuration + """ + CREDENTIALS_DIR = "configuration/credentials" + CREDENTIALS_FILENAME = "credentials.yml" + SOPS_METADATA_KEY = "sops" + env_creds = {} + central_creds_path = f"{WORK_DIR}/{CREDENTIALS_DIR}/{CREDENTIALS_FILENAME}" - central_creds_path = f"{WORK_DIR}/configuration/credentials/credentials.yml" if os.path.exists(central_creds_path): try: - with open(central_creds_path, 'r') as f: - central_creds = yaml.safe_load(f) or {} - for cred_id, cred_config in central_creds.items(): - if cred_id == "sops": - continue - if not isinstance(cred_config, dict): - logger.warning(f"Skipping invalid credential entry '{cred_id}': not a dict") - continue - env_creds[cred_id] = cred_config + logger.debug(f"Loading central credentials from: {central_creds_path}") + # decrypt_file handles SOPS, Fernet, and plaintext formats automatically + central_creds = decrypt_file(central_creds_path, in_place=False, allow_default=True) or {} + + for cred_id, cred_config in central_creds.items(): + if cred_id == SOPS_METADATA_KEY: + continue + if not isinstance(cred_config, dict): + logger.warning(f"Skipping invalid credential '{cred_id}': expected dict, got {type(cred_config).__name__}") + continue + env_creds[cred_id] = cred_config + + logger.info(f"Loaded {len(env_creds)} credential(s) from central credentials file") except Exception as e: - logger.warning(f"Failed to load central credentials: {e}") + logger.warning(f"Failed to load central credentials from {central_creds_path}: {e}") + # Merge environment-specific credentials (do not override central credentials) if env and hasattr(env, 'creds') and env.creds: + env_specific_count = 0 for cred_id, cred_config in env.creds.items(): if cred_id in env_creds: + logger.debug(f"Credential '{cred_id}' from environment overridden by central credentials") continue if not isinstance(cred_config, dict): - logger.warning(f"Skipping invalid credential '{cred_id}': not a dict") + logger.warning(f"Skipping invalid credential '{cred_id}': expected dict, got {type(cred_config).__name__}") continue env_creds[cred_id] = cred_config + env_specific_count += 1 + + if env_specific_count > 0: + logger.info(f"Loaded {env_specific_count} environment-specific credential(s)") return env_creds From 43fd17666c3de30c48385698731416f3891aeb5c Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 31 Dec 2025 17:38:42 +0530 Subject: [PATCH 19/48] fix: Fixing V2 anonymous access bug for Artifactory and Nexus --- .../artifact-searcher/artifact_searcher/cloud_auth_helper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index d857a08d0..3ec310dd3 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -216,7 +216,7 @@ def _configure_artifactory(searcher: 'MavenArtifactSearcher', creds:
Optional[di """Configure Artifactory authentication. Supports anonymous access if creds is None.""" if creds is None: logger.info("Configuring Artifactory with anonymous access (no credentials)") - return searcher + return searcher.with_artifactory(username=None, password=None) return searcher.with_artifactory( username=creds.get("username", ""), @@ -228,7 +228,7 @@ def _configure_nexus(searcher: 'MavenArtifactSearcher', creds: Optional[dict]) - """Configure Nexus authentication. Supports anonymous access if creds is None.""" if creds is None: logger.info("Configuring Nexus with anonymous access (no credentials)") - return searcher + return searcher.with_nexus(username=None, password=None) return searcher.with_nexus( username=creds.get("username", ""), From 41d25bbd84f4fc95780a4c4fd2924f6261231a61 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 3 Dec 2025 01:00:03 +0530 Subject: [PATCH 20/48] feat: Add RegDef V2 validation with version detection Detect version field in RegDef files and validate against V2 schema when version is 2.0. Use logger for validation messages and move schema paths to constants. --- scripts/build_env/main.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/scripts/build_env/main.py b/scripts/build_env/main.py index e2867c5a7..76232973c 100644 --- a/scripts/build_env/main.py +++ b/scripts/build_env/main.py @@ -1,3 +1,6 @@ +import argparse +import os + from envgenehelper import * from envgenehelper.deployer import * @@ -17,6 +20,9 @@ CLOUD_SCHEMA = "schemas/cloud.schema.json" NAMESPACE_SCHEMA = "schemas/namespace.schema.json" ENV_SPECIFIC_RESOURCE_PROFILE_SCHEMA = "schemas/resource-profile.schema.json" +APPDEF_SCHEMA = "schemas/appdef.schema.json" +REGDEF_V1_SCHEMA = "schemas/regdef.schema.json" +REGDEF_V2_SCHEMA = "schemas/regdef-v2.schema.json" def prepare_folders_for_rendering(env_name, cluster_name, source_env_dir, templates_dir, render_dir, @@ -278,15 +284,18 @@ def validate_appregdefs(render_dir, env_name): logger.info(f"No AppDef YAMLs found in {appdef_dir}") for file in appdef_files: logger.info(f"AppDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/appdef.schema.json") + validate_yaml_by_scheme_or_fail(file, APPDEF_SCHEMA) if os.path.exists(regdef_dir): regdef_files = findAllYamlsInDir(regdef_dir) if not regdef_files: logger.info(f"No RegDef YAMLs found in {regdef_dir}") for file in regdef_files: - logger.info(f"RegDef file: {file}") - validate_yaml_by_scheme_or_fail(file, "schemas/regdef.schema.json") + logger.info(f"Validating RegDef file: {file}") + regdef_content = openYaml(file) + version = str(regdef_content.get('version', '1.0')) + schema_path = REGDEF_V2_SCHEMA if version != '1.0' else REGDEF_V1_SCHEMA + validate_yaml_by_scheme_or_fail(file, schema_path) def render_environment(env_name, cluster_name, templates_dir, all_instances_dir, output_dir, g_template_version, From 81f2862511be8795d467a16fad628cc5ebb87c3d Mon Sep 17 00:00:00 2001 From: BackendBits Date: Fri, 9 Jan 2026 18:18:23 +0530 Subject: [PATCH 21/48] fix: minor fix --- python/artifact-searcher/artifact_searcher/artifact.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index c96134095..3e4e9b275 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -329,7 +329,7 @@ async def _check_artifact_v2_async(app: Application, 
artifact_extension: FileExt from qubership_pipelines_common_library.v1.maven_client import Artifact as MavenArtifact except ImportError as e: logger.error(f"V2 fallback for '{app.name}': Missing required libraries - {e}") - return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="" + return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") if not auth_config: From f3b1f549bd59d5d291f890d9e7c3e943119f6362 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 13 Jan 2026 12:16:13 +0530 Subject: [PATCH 22/48] fix: adding test cases for changes --- .../artifact_searcher/test_artifact.py | 381 +++++++++++++++++- scripts/build_env/handle_sd.py | 62 +-- .../tests/env-template/test_env_template.py | 49 ++- .../tests/sd/test_handle_sd_env_creds.py | 46 +++ 4 files changed, 477 insertions(+), 61 deletions(-) create mode 100644 scripts/build_env/tests/sd/test_handle_sd_env_creds.py diff --git a/python/artifact-searcher/artifact_searcher/test_artifact.py b/python/artifact-searcher/artifact_searcher/test_artifact.py index 86d89111a..0660e6a41 100644 --- a/python/artifact-searcher/artifact_searcher/test_artifact.py +++ b/python/artifact-searcher/artifact_searcher/test_artifact.py @@ -5,7 +5,7 @@ os.environ["DEFAULT_REQUEST_TIMEOUT"] = "0.2" # for test cases to run quicker from artifact_searcher.utils import models -from artifact_searcher.artifact import check_artifact_async +from artifact_searcher.artifact import check_artifact_async, _parse_snapshot_version class MockResponse: @@ -132,6 +132,383 @@ async def mock_v2_async(*args, **kwargs): monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v2_async", mock_v2_async) - result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", env_creds) + result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", env_creds=env_creds) assert result is not None assert result[1][0] == "v2_downloaded" + + +async def test_v2_registry_fallback_to_v1_on_error(monkeypatch): + """Test V2 falls back to V1 when V2 search fails""" + auth_cfg = models.AuthConfig( + credentials_id="aws-creds", + provider="aws", + auth_method="secret", + aws_domain="test-domain", + aws_region="us-east-1", + ) + mvn_cfg = models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="https://test.codeartifact.us-east-1.amazonaws.com/maven/repo/", + auth_config="aws-maven", + ) + dcr_cfg = models.DockerConfig() + reg = models.Registry( + name="aws-registry", + maven_config=mvn_cfg, + docker_config=dcr_cfg, + version="2.0", + auth_config={"aws-maven": auth_cfg}, + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=reg, + solution_descriptor=False, + ) + env_creds = {"aws-creds": {"username": "key", "password": "secret"}} + + async def mock_v2_async(*args, **kwargs): + raise Exception("V2 cloud auth failed") + + async def mock_v1_async(*args, **kwargs): + return ("http://v1-url", ("v1_repo", "v1_pointer")) + + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v2_async", mock_v2_async) + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v1_async", mock_v1_async) + + result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", env_creds=env_creds) + assert result is not None + assert result[0] == "http://v1-url" + assert result[1][0] 
== "v1_repo" + + +async def test_v1_registry_skips_v2(monkeypatch): + """Test V1 registry (version=1.0) goes directly to V1 search""" + mvn_cfg = models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="https://nexus.example.com/repository/", + ) + dcr_cfg = models.DockerConfig() + reg = models.Registry( + name="nexus-registry", + maven_config=mvn_cfg, + docker_config=dcr_cfg, + version="1.0", + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=reg, + solution_descriptor=False, + ) + + v2_called = False + v1_called = False + + async def mock_v2_async(*args, **kwargs): + nonlocal v2_called + v2_called = True + return None + + async def mock_v1_async(*args, **kwargs): + nonlocal v1_called + v1_called = True + return ("http://v1-url", ("v1_repo", "v1_pointer")) + + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v2_async", mock_v2_async) + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v1_async", mock_v1_async) + + result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0") + + assert v1_called + assert not v2_called + assert result is not None + + +async def test_v2_missing_env_creds_fallback(monkeypatch): + """Test V2 with AWS/GCP but no env_creds falls back to V1""" + auth_cfg = models.AuthConfig( + credentials_id="aws-creds", + provider="aws", + auth_method="secret", + aws_domain="test-domain", + aws_region="us-east-1", + ) + mvn_cfg = models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="https://test.codeartifact.us-east-1.amazonaws.com/maven/repo/", + auth_config="aws-maven", + ) + dcr_cfg = models.DockerConfig() + reg = models.Registry( + name="aws-registry", + maven_config=mvn_cfg, + docker_config=dcr_cfg, + version="2.0", + auth_config={"aws-maven": auth_cfg}, + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=reg, + solution_descriptor=False, + ) + + async def mock_v1_async(*args, **kwargs): + return ("http://v1-url", ("v1_repo", "v1_pointer")) + + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v1_async", mock_v1_async) + + # Call without env_creds + result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0") + + assert result is not None + assert result[0] == "http://v1-url" + + +async def test_v2_missing_auth_config_fallback(monkeypatch): + """Test V2 without auth_config falls back to V1""" + mvn_cfg = models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="https://test.codeartifact.us-east-1.amazonaws.com/maven/repo/", + # No auth_config + ) + dcr_cfg = models.DockerConfig() + reg = models.Registry( + name="aws-registry", + maven_config=mvn_cfg, + docker_config=dcr_cfg, + version="2.0", + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=reg, + solution_descriptor=False, + ) + env_creds = {"aws-creds": {"username": "key", "password": "secret"}} + + async def mock_v1_async(*args, **kwargs): + return ("http://v1-url", ("v1_repo", "v1_pointer")) + + monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v1_async", mock_v1_async) + + result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", env_creds=env_creds) + + assert result is not None + assert 
result[0] == "http://v1-url"
+
+
+async def test_v2_gcp_registry(monkeypatch):
+ """Test V2 with GCP Artifact Registry"""
+ auth_cfg = models.AuthConfig(
+ credentials_id="gcp-creds",
+ provider="gcp",
+ auth_method="service_account",
+ gcp_project="test-project",
+ gcp_location="us-central1",
+ gcp_repository="test-repo",
+ )
+ mvn_cfg = models.MavenConfig(
+ target_snapshot="snapshots",
+ target_staging="staging",
+ target_release="releases",
+ repository_domain_name="https://us-central1-maven.pkg.dev/test-project/test-repo/",
+ auth_config="gcp-maven",
+ )
+ dcr_cfg = models.DockerConfig()
+ reg = models.Registry(
+ name="gcp-registry",
+ maven_config=mvn_cfg,
+ docker_config=dcr_cfg,
+ version="2.0",
+ auth_config={"gcp-maven": auth_cfg},
+ )
+ app = models.Application(
+ name="test-app",
+ artifact_id="test-artifact",
+ group_id="com.test",
+ registry=reg,
+ solution_descriptor=False,
+ )
+ env_creds = {"gcp-creds": {"username": "_json_key", "password": '{"type": "service_account"}'}}
+
+ async def mock_v2_async(*args, **kwargs):
+ return ("http://gcp-url", ("v2_downloaded", "/tmp/artifact.json"))
+
+ monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v2_async", mock_v2_async)
+
+ result = await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", env_creds=env_creds)
+
+ assert result is not None
+ assert result[1][0] == "v2_downloaded"
+
+
+async def test_check_artifact_async_with_classifier(monkeypatch):
+ """Test check_artifact_async passes classifier parameter correctly"""
+ mvn_cfg = models.MavenConfig(
+ target_snapshot="snapshots",
+ target_staging="staging",
+ target_release="releases",
+ repository_domain_name="https://nexus.example.com/repository/",
+ )
+ dcr_cfg = models.DockerConfig()
+ reg = models.Registry(
+ name="nexus-registry",
+ maven_config=mvn_cfg,
+ docker_config=dcr_cfg,
+ )
+ app = models.Application(
+ name="test-app",
+ artifact_id="test-artifact",
+ group_id="com.test",
+ registry=reg,
+ solution_descriptor=False,
+ )
+
+ classifier_passed = None
+
+ async def mock_v1_async(*args, **kwargs):
+ nonlocal classifier_passed
+ classifier_passed = kwargs.get('classifier') if kwargs else args[3] if len(args) > 3 else ""
+ return ("http://url", ("repo", "pointer"))
+
+ monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v1_async", mock_v1_async)
+
+ await check_artifact_async(app, models.FileExtension.JSON, "1.0.0", classifier="sources")
+
+ assert classifier_passed == "sources"
+
+
+def test_parse_snapshot_version_with_matching_extension():
+ """Test _parse_snapshot_version finds matching extension"""
+ metadata_xml = """
+ <metadata>
+ <versioning>
+ <snapshotVersions>
+ <snapshotVersion>
+ <extension>json</extension>
+ <value>1.0.0-20240702.123456-1</value>
+ </snapshotVersion>
+ <snapshotVersion>
+ <extension>zip</extension>
+ <value>1.0.0-20240702.123456-2</value>
+ </snapshotVersion>
+ </snapshotVersions>
+ </versioning>
+ </metadata>
+ """
+
+ app = models.Application(
+ name="test-app",
+ artifact_id="test-artifact",
+ group_id="com.test",
+ registry=None,
+ solution_descriptor=False,
+ )
+
+ result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT")
+
+ assert result == "1.0.0-20240702.123456-1"
+
+
+def test_parse_snapshot_version_with_classifier():
+ """Test _parse_snapshot_version finds matching extension and classifier"""
+ metadata_xml = """
+ <metadata>
+ <versioning>
+ <snapshotVersions>
+ <snapshotVersion>
+ <extension>json</extension>
+ <value>1.0.0-20240702.123456-1</value>
+ </snapshotVersion>
+ <snapshotVersion>
+ <classifier>sources</classifier>
+ <extension>json</extension>
+ <value>1.0.0-20240702.123456-2</value>
+ </snapshotVersion>
+ </snapshotVersions>
+ </versioning>
+ </metadata>
+ """
+
+ app = models.Application(
+ name="test-app",
+ artifact_id="test-artifact",
+ group_id="com.test",
+ registry=None,
+ solution_descriptor=False,
+ )
+
+ result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT", "sources")
+
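+ # Only the second snapshotVersion entry carries the "sources" classifier,
+ # so its timestamped value is the one expected below.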
+ assert result == "1.0.0-20240702.123456-2"
+
+
+def test_parse_snapshot_version_no_matching_version():
+ """Test _parse_snapshot_version returns None when no match found"""
+ metadata_xml = """
+ <metadata>
+ <versioning>
+ <snapshotVersions>
+ <snapshotVersion>
+ <extension>zip</extension>
+ <value>1.0.0-20240702.123456-1</value>
+ </snapshotVersion>
+ </snapshotVersions>
+ </versioning>
+ </metadata>
+ """
+
+ app = models.Application(
+ name="test-app",
+ artifact_id="test-artifact",
+ group_id="com.test",
+ registry=None,
+ solution_descriptor=False,
+ )
+
+ # Looking for JSON but only ZIP available
+ result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT")
+
+ assert result is None
+
+
+def test_parse_snapshot_version_empty_snapshot_versions():
+ """Test _parse_snapshot_version returns None when no snapshotVersions"""
+ metadata_xml = """
+ <metadata>
+ <versioning>
+ <snapshotVersions>
+ </snapshotVersions>
+ </versioning>
+ </metadata>
+ """
+
+ app = models.Application(
+ name="test-app",
+ artifact_id="test-artifact",
+ group_id="com.test",
+ registry=None,
+ solution_descriptor=False,
+ )
+
+ result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT")
+
+ assert result is None
diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py
index ad9bb1273..d290d4a25 100644
--- a/scripts/build_env/handle_sd.py
+++ b/scripts/build_env/handle_sd.py
@@ -9,8 +9,8 @@
 import yaml
 from artifact_searcher import artifact
 from artifact_searcher.utils import models as artifact_models
+from envgenehelper import get_cred_config
 from envgenehelper.business_helper import getenv_and_log, getenv_with_error
-from envgenehelper.crypt import decrypt_file
 from envgenehelper.env_helper import Environment
 from envgenehelper.file_helper import identify_yaml_extension
 from envgenehelper.logger import logger
@@ -301,66 +301,12 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
 extract_sds_from_json(env, base_sd_path, sd_data_json, effective_merge_mode)
 
 
-def _get_environment_credentials(env: Environment = None) -> dict:
- """
- Load credentials from central credentials file and environment-specific credentials.
- Supports SOPS, Fernet, and plaintext credential formats.
- - Args: - env: Optional Environment object containing environment-specific credentials - - Returns: - Dictionary of credential ID to credential configuration - """ - CREDENTIALS_DIR = "configuration/credentials" - CREDENTIALS_FILENAME = "credentials.yml" - SOPS_METADATA_KEY = "sops" - - env_creds = {} - central_creds_path = f"{WORK_DIR}/{CREDENTIALS_DIR}/{CREDENTIALS_FILENAME}" - - if os.path.exists(central_creds_path): - try: - logger.debug(f"Loading central credentials from: {central_creds_path}") - # decrypt_file handles SOPS, Fernet, and plaintext formats automatically - central_creds = decrypt_file(central_creds_path, in_place=False, allow_default=True) or {} - - for cred_id, cred_config in central_creds.items(): - if cred_id == SOPS_METADATA_KEY: - continue - if not isinstance(cred_config, dict): - logger.warning(f"Skipping invalid credential '{cred_id}': expected dict, got {type(cred_config).__name__}") - continue - env_creds[cred_id] = cred_config - - logger.info(f"Loaded {len(env_creds)} credential(s) from central credentials file") - except Exception as e: - logger.warning(f"Failed to load central credentials from {central_creds_path}: {e}") - - # Merge environment-specific credentials (do not override central credentials) - if env and hasattr(env, 'creds') and env.creds: - env_specific_count = 0 - for cred_id, cred_config in env.creds.items(): - if cred_id in env_creds: - logger.debug(f"Credential '{cred_id}' from environment overridden by central credentials") - continue - if not isinstance(cred_config, dict): - logger.warning(f"Skipping invalid credential '{cred_id}': expected dict, got {type(cred_config).__name__}") - continue - env_creds[cred_id] = cred_config - env_specific_count += 1 - - if env_specific_count > 0: - logger.info(f"Loaded {env_specific_count} environment-specific credential(s)") - - return env_creds - - def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, env: Environment = None) -> dict[str, object]: app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins) - env_creds = _get_environment_credentials(env) - artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds)) + # Use existing get_cred_config() utility for credentials + env_creds = get_cred_config() + artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds=env_creds)) if not artifact_info: raise ValueError( f'Solution descriptor content was not received for {app_name}:{version}') diff --git a/scripts/build_env/tests/env-template/test_env_template.py b/scripts/build_env/tests/env-template/test_env_template.py index 4204cfdc1..3e2c9a99b 100644 --- a/scripts/build_env/tests/env-template/test_env_template.py +++ b/scripts/build_env/tests/env-template/test_env_template.py @@ -1,10 +1,14 @@ from os import environ from pathlib import Path +from unittest.mock import patch import pytest import responses from aioresponses import aioresponses -from env_template.process_env_template import process_env_template +from env_template.process_env_template import ( + process_env_template, + extract_snapshot_version +) from envgenehelper.test_helpers import TestHelpers GROUP_ID = "org.qubership" @@ -207,3 +211,46 @@ def test_old_logic_with_zip(self): assert len(responses.calls) == 4 assert responses.calls[3].request.url == tmpl_zip_url + + def test_extract_snapshot_version_with_snapshot(self): + """Test snapshot version extraction from URL""" + url = 
f"{SNAPSHOT_BASE}/{BASE_PATH}/{ARTIFACT_NAME}.json" + snapshot_version = VERSION + + result = extract_snapshot_version(url, snapshot_version) + + assert result == SNAPSHOT_VERSION + assert "-SNAPSHOT" not in result + assert SNAPSHOT_TIMESTAMP in result + + def test_extract_snapshot_version_without_snapshot(self): + """Test version extraction from non-snapshot URL""" + release_version = "1.0.0" + url = f"{STAGING_BASE}/{GROUP_PATH}/{ARTIFACT_ID}/{release_version}/{ARTIFACT_ID}-{release_version}.zip" + + result = extract_snapshot_version(url, release_version) + + assert result == release_version + + @responses.activate + @patch('env_template.process_env_template.get_cred_config') + def test_new_logic_with_v2_credentials(self, mock_get_creds, mock_aio_response): + """Test new logic with RegDef v2 credentials passed through""" + set_env("env-01") + + mock_get_creds.return_value = { + 'aws-creds': {'type': 'usernamePassword', 'data': {'username': 'key', 'password': 'secret'}} + } + + mock_metadata(mock_aio_response) + mock_dd_exists(mock_aio_response, exists=True) + mock_dd_response() + mock_zip(STAGING_ZIP_URL) + + with patch('env_template.process_env_template.artifact.check_artifact_async') as mock_check: + mock_check.return_value = (DD_URL, ("repo", "pointer")) + + process_env_template() + + # Verify get_cred_config was called + assert mock_get_creds.called diff --git a/scripts/build_env/tests/sd/test_handle_sd_env_creds.py b/scripts/build_env/tests/sd/test_handle_sd_env_creds.py new file mode 100644 index 000000000..17deafc9b --- /dev/null +++ b/scripts/build_env/tests/sd/test_handle_sd_env_creds.py @@ -0,0 +1,46 @@ +import asyncio +import os +from pathlib import Path +from unittest.mock import patch, MagicMock + +import pytest +from ruamel.yaml import YAML + +os.environ['ENVIRONMENT_NAME'] = "temporary" +os.environ['CLUSTER_NAME'] = "temporary" +os.environ['CI_PROJECT_DIR'] = "temporary" + +from handle_sd import download_sd_by_appver +from envgenehelper.env_helper import Environment + +yaml = YAML() + + +class TestDownloadSdWithEnvCreds: + """Test that download_sd_by_appver uses get_cred_config correctly""" + + @patch('handle_sd.get_cred_config') + @patch('handle_sd.artifact.check_artifact_async') + @patch('handle_sd.get_appdef_for_app') + def test_download_sd_uses_get_cred_config(self, mock_get_appdef, mock_check_artifact, mock_get_creds): + """Test that download_sd_by_appver uses existing get_cred_config utility""" + mock_get_creds.return_value = { + 'aws-creds': {'type': 'usernamePassword', 'data': {'username': 'key', 'password': 'secret'}} + } + mock_app_def = MagicMock() + mock_get_appdef.return_value = mock_app_def + + def capture_run(coro): + return ("http://sd-url", ("repo", "/tmp/sd.json")) + + with patch('handle_sd.asyncio.run', side_effect=capture_run): + with patch('handle_sd.open', create=True) as mock_open: + mock_open.return_value.__enter__.return_value.read.return_value = '{"applications": []}' + + try: + env = Environment("/test/path", "test-cluster", "test-env") + download_sd_by_appver("test-app", "1.0.0", MagicMock(), env) + except: + pass + + assert mock_get_creds.called From 084bb5b8cb2e311324cb896998c9e799dfc50d3a Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 13 Jan 2026 12:28:04 +0530 Subject: [PATCH 23/48] fix: adding test cases for changes 2 --- .../artifact_searcher/test_artifact.py | 50 +++++++++++++++++-- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/test_artifact.py 
b/python/artifact-searcher/artifact_searcher/test_artifact.py index 0660e6a41..60ce330fd 100644 --- a/python/artifact-searcher/artifact_searcher/test_artifact.py +++ b/python/artifact-searcher/artifact_searcher/test_artifact.py @@ -382,7 +382,7 @@ async def test_check_artifact_async_with_classifier(monkeypatch): async def mock_v1_async(*args, **kwargs): nonlocal classifier_passed - classifier_passed = kwargs.get('classifier') if kwargs else args[3] if len(args) > 3 else "" + classifier_passed = kwargs.get('classifier', args[4] if len(args) > 4 else "") return ("http://url", ("repo", "pointer")) monkeypatch.setattr("artifact_searcher.artifact._check_artifact_v1_async", mock_v1_async) @@ -413,11 +413,21 @@ def test_parse_snapshot_version_with_matching_extension(): """ + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) app = models.Application( name="test-app", artifact_id="test-artifact", group_id="com.test", - registry=None, + registry=dummy_registry, solution_descriptor=False, ) @@ -447,11 +457,21 @@ def test_parse_snapshot_version_with_classifier(): """ + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) app = models.Application( name="test-app", artifact_id="test-artifact", group_id="com.test", - registry=None, + registry=dummy_registry, solution_descriptor=False, ) @@ -476,11 +496,21 @@ def test_parse_snapshot_version_no_matching_version(): """ + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) app = models.Application( name="test-app", artifact_id="test-artifact", group_id="com.test", - registry=None, + registry=dummy_registry, solution_descriptor=False, ) @@ -501,11 +531,21 @@ def test_parse_snapshot_version_empty_snapshot_versions(): """ + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) app = models.Application( name="test-app", artifact_id="test-artifact", group_id="com.test", - registry=None, + registry=dummy_registry, solution_descriptor=False, ) From a26b694660e8e5d986561fe6274b8e09b14fc684 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 13 Jan 2026 12:38:21 +0530 Subject: [PATCH 24/48] fix: adding test cases for changes 3 --- scripts/build_env/tests/env-template/test_env_template.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/build_env/tests/env-template/test_env_template.py b/scripts/build_env/tests/env-template/test_env_template.py index 3e2c9a99b..1c7e3ac64 100644 --- a/scripts/build_env/tests/env-template/test_env_template.py +++ b/scripts/build_env/tests/env-template/test_env_template.py @@ -239,6 +239,7 @@ def test_new_logic_with_v2_credentials(self, mock_get_creds, mock_aio_response): set_env("env-01") mock_get_creds.return_value = { + 'artifactory-cred': {'type': 'usernamePassword', 'data': 
{'username': 'test-user', 'password': 'test-pass'}}, 'aws-creds': {'type': 'usernamePassword', 'data': {'username': 'key', 'password': 'secret'}} } From 1ee73dc218f365ec5ad1aa04037d0c4c908d5d5f Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 13 Jan 2026 13:17:06 +0530 Subject: [PATCH 25/48] fix: adding test cases for changes 4 --- .../tests/sd/test_handle_sd_env_creds.py | 46 ------------------- .../tests/sd/test_handle_sd_local.py | 30 +++++++++++- 2 files changed, 29 insertions(+), 47 deletions(-) delete mode 100644 scripts/build_env/tests/sd/test_handle_sd_env_creds.py diff --git a/scripts/build_env/tests/sd/test_handle_sd_env_creds.py b/scripts/build_env/tests/sd/test_handle_sd_env_creds.py deleted file mode 100644 index 17deafc9b..000000000 --- a/scripts/build_env/tests/sd/test_handle_sd_env_creds.py +++ /dev/null @@ -1,46 +0,0 @@ -import asyncio -import os -from pathlib import Path -from unittest.mock import patch, MagicMock - -import pytest -from ruamel.yaml import YAML - -os.environ['ENVIRONMENT_NAME'] = "temporary" -os.environ['CLUSTER_NAME'] = "temporary" -os.environ['CI_PROJECT_DIR'] = "temporary" - -from handle_sd import download_sd_by_appver -from envgenehelper.env_helper import Environment - -yaml = YAML() - - -class TestDownloadSdWithEnvCreds: - """Test that download_sd_by_appver uses get_cred_config correctly""" - - @patch('handle_sd.get_cred_config') - @patch('handle_sd.artifact.check_artifact_async') - @patch('handle_sd.get_appdef_for_app') - def test_download_sd_uses_get_cred_config(self, mock_get_appdef, mock_check_artifact, mock_get_creds): - """Test that download_sd_by_appver uses existing get_cred_config utility""" - mock_get_creds.return_value = { - 'aws-creds': {'type': 'usernamePassword', 'data': {'username': 'key', 'password': 'secret'}} - } - mock_app_def = MagicMock() - mock_get_appdef.return_value = mock_app_def - - def capture_run(coro): - return ("http://sd-url", ("repo", "/tmp/sd.json")) - - with patch('handle_sd.asyncio.run', side_effect=capture_run): - with patch('handle_sd.open', create=True) as mock_open: - mock_open.return_value.__enter__.return_value.read.return_value = '{"applications": []}' - - try: - env = Environment("/test/path", "test-cluster", "test-env") - download_sd_by_appver("test-app", "1.0.0", MagicMock(), env) - except: - pass - - assert mock_get_creds.called diff --git a/scripts/build_env/tests/sd/test_handle_sd_local.py b/scripts/build_env/tests/sd/test_handle_sd_local.py index de9edadae..daeaee4a7 100644 --- a/scripts/build_env/tests/sd/test_handle_sd_local.py +++ b/scripts/build_env/tests/sd/test_handle_sd_local.py @@ -1,6 +1,7 @@ import os import pytest +from unittest.mock import patch, MagicMock from ruamel.yaml import YAML from test_sd_helpers import do_prerequisites, assert_sd_contents, load_test_pipeline_sd_data @@ -9,7 +10,7 @@ os.environ['CLUSTER_NAME'] = "temporary" os.environ['CI_PROJECT_DIR'] = "temporary" -from handle_sd import handle_sd +from handle_sd import handle_sd, download_sd_by_appver from envgenehelper import * from envgenehelper.env_helper import Environment @@ -54,3 +55,30 @@ def test_sd_positive(test_case_name): assert_sd_contents(TEST_SD_DIR, OUTPUT_DIR, test_case_name, actual_dir, test_suits_map) logger.info(f"=====SUCCESS - {test_case_name}======") + + +@patch('handle_sd.get_cred_config') +@patch('handle_sd.artifact.check_artifact_async') +@patch('handle_sd.get_appdef_for_app') +def test_download_sd_uses_get_cred_config(mock_get_appdef, mock_check_artifact, mock_get_creds): + """Test that 
download_sd_by_appver uses existing get_cred_config utility""" + mock_get_creds.return_value = { + 'aws-creds': {'type': 'usernamePassword', 'data': {'username': 'key', 'password': 'secret'}} + } + mock_app_def = MagicMock() + mock_get_appdef.return_value = mock_app_def + + def capture_run(coro): + return ("http://sd-url", ("repo", "/tmp/sd.json")) + + with patch('handle_sd.asyncio.run', side_effect=capture_run): + with patch('handle_sd.open', create=True) as mock_open: + mock_open.return_value.__enter__.return_value.read.return_value = '{"applications": []}' + + try: + env = Environment("/test/path", "test-cluster", "test-env") + download_sd_by_appver("test-app", "1.0.0", MagicMock(), env) + except: + pass + + assert mock_get_creds.called From 4396d18aeaaf743f0d234a70f6a03d057b1a7ed0 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 13 Jan 2026 17:21:20 +0530 Subject: [PATCH 26/48] fix: adding test cases for changes 5 --- scripts/build_env/env_template/process_env_template.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/build_env/env_template/process_env_template.py b/scripts/build_env/env_template/process_env_template.py index e9924f727..8e6c36562 100644 --- a/scripts/build_env/env_template/process_env_template.py +++ b/scripts/build_env/env_template/process_env_template.py @@ -58,10 +58,11 @@ def download_artifact_new_logic(env_definition: dict) -> str: app_name, app_version = parse_artifact_appver(env_definition) app_def = load_artifact_definition(app_name) cred = get_registry_creds(app_def.registry) + env_creds = get_cred_config() template_url = None resolved_version = app_version - dd_artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.JSON, app_version)) + dd_artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.JSON, app_version, env_creds=env_creds)) if dd_artifact_info: logger.info("Loading environment template artifact info from deployment descriptor...") dd_url, dd_repo = dd_artifact_info @@ -81,7 +82,7 @@ def download_artifact_new_logic(env_definition: dict) -> str: else: logger.info("Loading environment template artifact from zip directly...") group_id, artifact_id, version = app_def.group_id, app_def.artifact_id, app_version - artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.ZIP, app_version)) + artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.ZIP, app_version, env_creds=env_creds)) if artifact_info: template_url, _ = artifact_info if "-SNAPSHOT" in app_version: From 468fc26b6b56bd0065948f8bbc9878e521efa57d Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 14 Jan 2026 13:18:49 +0530 Subject: [PATCH 27/48] fix: Added minor changes --- .../artifact_searcher/artifact.py | 40 +++++++++++++++---- .../artifact_searcher/cloud_auth_helper.py | 2 + 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index 3e4e9b275..e2a8b23b3 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -13,10 +13,21 @@ import aiohttp import requests from aiohttp import BasicAuth +from requests.auth import HTTPBasicAuth + from artifact_searcher.utils.constants import DEFAULT_REQUEST_TIMEOUT, TCP_CONNECTION_LIMIT, METADATA_XML from artifact_searcher.utils.models import Registry, Application, FileExtension, Credentials, ArtifactInfo from envgenehelper 
import logger -from requests.auth import HTTPBasicAuth + +try: + from qubership_pipelines_common_library.v1.maven_client import Artifact as MavenArtifact +except ImportError: + MavenArtifact = None + +try: + from artifact_searcher.cloud_auth_helper import CloudAuthHelper +except ImportError: + CloudAuthHelper = None WORKSPACE = os.getenv("WORKSPACE", Path(tempfile.gettempdir()) / "zips") @@ -320,15 +331,24 @@ async def check_artifact_async( async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]: + """Resolve and download artifacts using RegDef V2 cloud configuration. + + Uses CloudAuthHelper and the shared Maven client to search and download + artifacts from cloud-backed registries (AWS, GCP, Artifactory, Nexus). + Falls back to the V1 HTTP-based logic when configuration or credentials + are missing, or when any unrecoverable error occurs. + """ if not getattr(app.registry.maven_config, 'auth_config', None): logger.error(f"V2 fallback for '{app.name}': Registry '{app.registry.name}' version 2.0 missing maven_config.authConfig") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - try: - from artifact_searcher.cloud_auth_helper import CloudAuthHelper - from qubership_pipelines_common_library.v1.maven_client import Artifact as MavenArtifact - except ImportError as e: - logger.error(f"V2 fallback for '{app.name}': Missing required libraries - {e}") + if CloudAuthHelper is None or MavenArtifact is None: + missing = [] + if CloudAuthHelper is None: + missing.append("artifact_searcher.cloud_auth_helper") + if MavenArtifact is None: + missing.append("qubership_pipelines_common_library.v1.maven_client.Artifact") + logger.error(f"V2 fallback for '{app.name}': Missing required libraries - {', '.join(missing)}") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") @@ -446,6 +466,13 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt async def _v2_download_with_fallback(searcher, url: str, local_path: str, auth_config, registry: Registry, env_creds: Optional[dict]) -> bool: + """Download artifact using MavenArtifactSearcher with HTTP fallback. + + Attempts to download via the configured MavenArtifactSearcher first, + bounded by V2_DOWNLOAD_TIMEOUT. For supported providers (GCP, Artifactory, + Nexus), falls back to a direct HTTP GET when the searcher-based download + fails, optionally adding GCP access tokens to the request. 
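+
+ Returns True when one of the download strategies succeeds, False otherwise.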
+ """ loop = asyncio.get_running_loop() try: @@ -463,7 +490,6 @@ async def _v2_download_with_fallback(searcher, url: str, local_path: str, auth_c return False try: - from artifact_searcher.cloud_auth_helper import CloudAuthHelper headers = {} if auth_config.provider == "gcp": sa_json = CloudAuthHelper.get_gcp_credentials_from_registry(registry, env_creds) diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index 3ec310dd3..0e13ffe3b 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -177,6 +177,7 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict @staticmethod def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, creds: dict, registry_url: str) -> 'MavenArtifactSearcher': + """Configure MavenArtifactSearcher for AWS CodeArtifact using username/password creds.""" if not auth_config.aws_domain: raise ValueError("AWS auth requires aws_domain in authConfig") region = CloudAuthHelper._extract_region(registry_url, auth_config) @@ -193,6 +194,7 @@ def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, @staticmethod def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, creds: dict, registry_url: str) -> 'MavenArtifactSearcher': + """Configure MavenArtifactSearcher for GCP Artifact Registry using service account JSON.""" if auth_config.auth_method != "service_account": raise ValueError(f"GCP auth_method '{auth_config.auth_method}' not supported") if not auth_config.gcp_reg_project: From 34eb1321ee8e0d1b41bd7b67d7b4c6935dad53a3 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 14 Jan 2026 13:44:01 +0530 Subject: [PATCH 28/48] fix: Added minor changes 2 --- python/artifact-searcher/artifact_searcher/artifact.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index e2a8b23b3..143418e69 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -356,7 +356,6 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.error(f"V2 fallback for '{app.name}': Could not resolve authConfig for registry '{app.registry.name}'") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - # Note: provider is required in RegDef v2 and validated in cloud_auth_helper # AWS and GCP require credentials; Artifactory/Nexus can work with anonymous access if auth_config.provider in ["aws", "gcp"]: @@ -529,7 +528,7 @@ async def _check_artifact_v1_async( fixed_domain = convert_nexus_repo_url_to_index_view(original_domain) if fixed_domain != original_domain: logger.info(f"Retrying artifact check with edited domain: {fixed_domain}") - result = await _attempt_check(app, version, artifact_extension, fixed_domain, cred, classifier) # Now works + result = await _attempt_check(app, version, artifact_extension, fixed_domain, cred, classifier) if result is not None: return result else: From e628c52d8dae6bc16b1ac9ee1133b4cdf839c607 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Fri, 16 Jan 2026 17:09:30 +0530 Subject: [PATCH 29/48] fix: Added minor changes 3 --- .../artifact_searcher/artifact.py | 48 ++++-- .../artifact_searcher/test_artifact.py | 157 
++++++++++++++++++
 2 files changed, 191 insertions(+), 14 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index 143418e69..c59a6726c 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -98,7 +98,8 @@ async def resolve_snapshot_version_async(
 stop_snapshot_event_for_others.set()
 logger.info(
 f"[Task {task_id}] [Application: {app.name}: {version}] - Successfully fetched maven-metadata.xml: {metadata_url}")
- return resolved_version, task_id
+ return resolved_version, task_id
+ return None
 except Exception as e:
 logger.warning(
 f"[Task {task_id}] [Application: {app.name}: {version}] - Error resolving snapshot version from {metadata_url}: {e}")
@@ -113,21 +114,40 @@ def _parse_snapshot_version(
 classifier: str = ""
 ) -> str | None:
 root = ET.fromstring(content)
+
+ # Try new-style first (Maven 3+)
 snapshot_versions = root.findall(".//snapshotVersions/snapshotVersion")
- if not snapshot_versions:
- logger.warning(f"[Application: {app.name}: {version}] - No <snapshotVersions> found")
- return
-
- for node in snapshot_versions:
- node_classifier = node.findtext("classifier", default="")
- node_extension = node.findtext("extension", default="")
- value = node.findtext("value")
- if node_classifier == classifier and node_extension == extension.value:
+ if snapshot_versions:
+ for node in snapshot_versions:
+ node_classifier = node.findtext("classifier", default="")
+ node_extension = node.findtext("extension", default="")
+ value = node.findtext("value")
+ if node_classifier == classifier and node_extension == extension.value:
+ logger.info(
+ f"[Task {task_id}] [Application: {app.name}: {version}] - Resolved snapshot version '{value}'")
+ return value
+ logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - No matching snapshotVersion found")
+ return None
+
+ # Fallback to old-style metadata (Maven 2 / some Nexus repos)
+ snapshot_node = root.find(".//snapshot")
+ if snapshot_node is not None:
+ timestamp = snapshot_node.findtext("timestamp")
+ build_number = snapshot_node.findtext("buildNumber")
+
+ if timestamp and build_number:
+ # Convert timestamp from "yyyyMMdd.HHmmss" format and build timestamped version
+ base_version = version.replace("-SNAPSHOT", "")
+ resolved = f"{base_version}-{timestamp}-{build_number}"
 logger.info(
- f"[Task {task_id}] [Application: {app.name}: {version}] - Resolved snapshot version '{value}'")
- return value
-
- logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - No matching snapshotVersion found")
+ f"[Task {task_id}] [Application: {app.name}: {version}] - Resolved snapshot version '{resolved}' from old-style metadata")
+ return resolved
+
+ logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - <snapshot> found but missing timestamp or buildNumber")
+ return None
+
+ logger.warning(f"[Application: {app.name}: {version}] - No <snapshotVersions> or <snapshot> found in metadata")
+ return None
 
 
 def version_to_folder_name(version: str) -> str:
diff --git a/python/artifact-searcher/artifact_searcher/test_artifact.py b/python/artifact-searcher/artifact_searcher/test_artifact.py
index 60ce330fd..4c6870f41 100644
--- a/python/artifact-searcher/artifact_searcher/test_artifact.py
+++ b/python/artifact-searcher/artifact_searcher/test_artifact.py
@@ -552,3 +552,160 @@ def test_parse_snapshot_version_empty_snapshot_versions():
 result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT")
 
 assert result is None
+
+
+def test_parse_snapshot_version_old_style_metadata():
+ """Test _parse_snapshot_version supports old-style metadata (Maven 2 format)"""
+ metadata_xml = """
+ <metadata>
+ <groupId>com.netcracker.cloud.code2prod.deployment-descriptor</groupId>
+ <artifactId>c2p-test-sd-1</artifactId>
+ <version>feature-sd_public_cloud_registry_testing-SNAPSHOT</version>
+ <versioning>
+ <snapshot>
+ <timestamp>20260102.092159</timestamp>
+ <buildNumber>1</buildNumber>
+ </snapshot>
+ <lastUpdated>20260102092159</lastUpdated>
+ </versioning>
+ </metadata>
+ """
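+ # Old-style metadata has no <snapshotVersions> list: the parser must build
+ # "<base>-<timestamp>-<buildNumber>" from the <snapshot> element itself.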
"1.0.0-SNAPSHOT") assert result is None + + +def test_parse_snapshot_version_old_style_metadata(): + """Test _parse_snapshot_version supports old-style metadata (Maven 2 format)""" + metadata_xml = """ + + com.netcracker.cloud.code2prod.deployment-descriptor + c2p-test-sd-1 + feature-sd_public_cloud_registry_testing-SNAPSHOT + + + 20260102.092159 + 1 + + 20260102092159 + + + """ + + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=dummy_registry, + solution_descriptor=False, + ) + + result = _parse_snapshot_version( + metadata_xml, app, 1, models.FileExtension.JSON, + "feature-sd_public_cloud_registry_testing-SNAPSHOT" + ) + + assert result == "feature-sd_public_cloud_registry_testing-20260102.092159-1" + + +def test_parse_snapshot_version_old_style_missing_timestamp(): + """Test _parse_snapshot_version returns None when has no timestamp""" + metadata_xml = """ + + + + 1 + + + + """ + + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=dummy_registry, + solution_descriptor=False, + ) + + result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT") + + assert result is None + + +def test_parse_snapshot_version_old_style_missing_buildnumber(): + """Test _parse_snapshot_version returns None when has no buildNumber""" + metadata_xml = """ + + + + 20260102.092159 + + + + """ + + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=dummy_registry, + solution_descriptor=False, + ) + + result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT") + + assert result is None + + +def test_parse_snapshot_version_prefers_new_style_over_old(): + """Test _parse_snapshot_version prefers new-style when both are present""" + metadata_xml = """ + + + + 20240101.120000 + 99 + + + + + json + 1.0.0-20240702.123456-1 + + + + + """ + + dummy_registry = models.Registry( + name="dummy", + maven_config=models.MavenConfig( + target_snapshot="snapshots", + target_staging="staging", + target_release="releases", + repository_domain_name="http://dummy.repo/" + ), + docker_config=models.DockerConfig() + ) + app = models.Application( + name="test-app", + artifact_id="test-artifact", + group_id="com.test", + registry=dummy_registry, + solution_descriptor=False, + ) + + result = _parse_snapshot_version(metadata_xml, app, 1, models.FileExtension.JSON, "1.0.0-SNAPSHOT") + + # Should use new-style value, not old-style + assert result == "1.0.0-20240702.123456-1" From ac328cdac423baa2cf509fef43bd30c4bf29df44 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 27 Jan 2026 12:54:42 +0530 
Subject: [PATCH 30/48] fix: redownload issue --- .../build_env/env_template/process_env_template.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/scripts/build_env/env_template/process_env_template.py b/scripts/build_env/env_template/process_env_template.py index 6f50571d0..0a90f8808 100644 --- a/scripts/build_env/env_template/process_env_template.py +++ b/scripts/build_env/env_template/process_env_template.py @@ -84,7 +84,18 @@ def download_artifact_new_logic(env_definition: dict) -> str: group_id, artifact_id, version = app_def.group_id, app_def.artifact_id, app_version artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.ZIP, app_version, cred=cred, env_creds=env_creds)) if artifact_info: - template_url, _ = artifact_info + template_url, repo_info = artifact_info + # Check if v2 already downloaded the artifact + if isinstance(repo_info, tuple) and len(repo_info) == 2 and repo_info[0] == "v2_downloaded": + local_path = repo_info[1] + logger.info(f"V2 artifact already downloaded at: {local_path}") + import shutil + shutil.copy(local_path, artifact_dest) + logger.info(f"Copied V2 artifact to: {artifact_dest}") + if "-SNAPSHOT" in app_version: + resolved_version = extract_snapshot_version(template_url, app_version) + unpack_archive(artifact_dest, build_env_path) + return resolved_version if "-SNAPSHOT" in app_version: resolved_version = extract_snapshot_version(template_url, app_version) if not template_url: From 338da3fa0c819e02e48797252773bcbe2e8d0420 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Tue, 27 Jan 2026 20:45:46 +0530 Subject: [PATCH 31/48] fix: added comments --- .../artifact_searcher/artifact.py | 2 +- .../env_template/process_env_template.py | 40 ++++++++++++++----- 2 files changed, 32 insertions(+), 10 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index 95a035bf3..ca653cdea 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -115,7 +115,7 @@ def _parse_snapshot_version( ) -> str | None: root = ET.fromstring(content) - # Try new-style first (Maven 3+) + # Trying new-style first (Maven 3+) if its not found then its switched to Old Style snapshot_versions = root.findall(".//snapshotVersions/snapshotVersion") if snapshot_versions: for node in snapshot_versions: diff --git a/scripts/build_env/env_template/process_env_template.py b/scripts/build_env/env_template/process_env_template.py index 0a90f8808..0e5b03c63 100644 --- a/scripts/build_env/env_template/process_env_template.py +++ b/scripts/build_env/env_template/process_env_template.py @@ -1,7 +1,9 @@ import asyncio import os +import shutil import tempfile from pathlib import Path +from typing import Optional from artifact_searcher import artifact from artifact_searcher.utils.models import FileExtension, Credentials, Registry, Application @@ -16,13 +18,15 @@ build_env_path = "/build_env" -def parse_artifact_appver(env_definition: dict) -> [str, str]: +def parse_artifact_appver(env_definition: dict) -> tuple[str, str]: + """Extract artifact name and version from env_definition.yml.""" artifact_appver = env_definition['envTemplate'].get('artifact', '') logger.info(f"Environment template artifact version: {artifact_appver}") return artifact_appver.split(':') -def get_registry_creds(registry: Registry) -> Credentials: +def get_registry_creds(registry: Registry) -> Optional[Credentials]: + """Resolve 
V1 registry credentials. Returns None for registries without credentials.""" cred_config = render_creds() cred_id = registry.credentials_id if cred_id: @@ -36,11 +40,15 @@ def get_registry_creds(registry: Registry) -> Credentials: def parse_maven_coord_from_dd(dd_config: dict) -> tuple[str, str, str]: + """Extract Maven coordinates (groupId:artifactId:version) from deployment descriptor.""" artifact_str = dd_config['configurations'][0]['artifacts'][0].get('id') return artifact_str.split(':') def extract_snapshot_version(url: str, snapshot_version: str) -> str: + """Extract timestamped SNAPSHOT version from artifact URL. + Converts 'feature-branch-SNAPSHOT' to 'feature-branch-20250113.102430-45'. + """ base = snapshot_version.replace("-SNAPSHOT", "") filename = url.split("/")[-1] name = filename.rsplit(".", 1)[0] @@ -48,20 +56,29 @@ def extract_snapshot_version(url: str, snapshot_version: str) -> str: return name[pos:] -# logic downloading template by artifact definition def download_artifact_new_logic(env_definition: dict) -> str: + """Download environment template using artifact definition (V2-aware). + + Supports both V1 and V2 registries. For V2 cloud registries (AWS/GCP/Artifactory/Nexus), + uses CloudAuthHelper and MavenArtifactSearcher with automatic fallback to V1. + + Returns: + Resolved version string (with SNAPSHOT timestamp if applicable) + """ app_name, app_version = parse_artifact_appver(env_definition) + # Load artifact definition and credentials base_dir = getenv_with_error('CI_PROJECT_DIR') artifact_path = getAppDefinitionPath(base_dir, app_name) if not artifact_path: raise FileNotFoundError(f"No artifact definition file found for {app_name} with .yaml or .yml extension") app_def = Application.model_validate(openYaml(artifact_path)) - cred = get_registry_creds(app_def.registry) - env_creds = get_cred_config() + cred = get_registry_creds(app_def.registry) # V1 credentials + env_creds = get_cred_config() # V2 credentials (Jenkins credential store) template_url = None resolved_version = app_version + # Try deployment descriptor first (multi-artifact solutions) dd_artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.JSON, app_version, cred=cred, env_creds=env_creds)) if dd_artifact_info: logger.info("Loading environment template artifact info from deployment descriptor...") @@ -80,17 +97,17 @@ def download_artifact_new_logic(env_definition: dict) -> str: repo_url = dd_config.get("configurations", [{}])[0].get("maven_repository") or dd_repo template_url = artifact.check_artifact(repo_url, group_id, artifact_id, version, FileExtension.ZIP) else: + # No deployment descriptor, download ZIP directly logger.info("Loading environment template artifact from zip directly...") group_id, artifact_id, version = app_def.group_id, app_def.artifact_id, app_version artifact_info = asyncio.run(artifact.check_artifact_async(app_def, FileExtension.ZIP, app_version, cred=cred, env_creds=env_creds)) if artifact_info: template_url, repo_info = artifact_info - # Check if v2 already downloaded the artifact + # V2 optimization: artifact already downloaded by MavenArtifactSearcher if isinstance(repo_info, tuple) and len(repo_info) == 2 and repo_info[0] == "v2_downloaded": local_path = repo_info[1] logger.info(f"V2 artifact already downloaded at: {local_path}") - import shutil - shutil.copy(local_path, artifact_dest) + shutil.copy(local_path, artifact_dest) # Copy to standard location logger.info(f"Copied V2 artifact to: {artifact_dest}") if "-SNAPSHOT" in app_version: 
resolved_version = extract_snapshot_version(template_url, app_version)
@@ -100,6 +117,8 @@ def download_artifact_new_logic(env_definition: dict) -> str:
 resolved_version = extract_snapshot_version(template_url, app_version)
 if not template_url:
 raise ValueError(f"artifact not found group_id={group_id}, artifact_id={artifact_id}, version={version}")
+
+ # V1 path or V2 fallback: download via HTTP
 logger.info(f"Environment template url has been resolved: {template_url}")
 artifact.download(template_url, artifact_dest, cred)
 unpack_archive(artifact_dest, build_env_path)
@@ -107,6 +126,7 @@ def download_artifact_new_logic(env_definition: dict) -> str:
 
 def render_creds() -> dict:
+ """Render credential templates with environment variables."""
 cred_config = get_cred_config()
 context = Context()
 context.env_vars.update(dict(os.environ))
@@ -115,7 +135,7 @@
 return rendered
 
 
-# logic downloading template by exact coordinates and repo, deprecated
+# downloading template by exact coordinates and repo, deprecated
 def download_artifact_old_logic(env_definition: dict, project_dir: str) -> str:
 template_artifact = env_definition['envTemplate']['templateArtifact']
 artifact_info = template_artifact['artifact']
@@ -165,6 +185,7 @@ def download_artifact_old_logic(env_definition: dict, project_dir: str) -> str:
 
 def process_env_template() -> str:
+ """Main entry point for template download. Routes to new or old logic based on env_definition format."""
 env_template_test = os.getenv("ENV_TEMPLATE_TEST", "").lower() == "true"
 if env_template_test:
 run_env_test_setup()
@@ -174,6 +195,7 @@
 env_dir = Path(f"{project_dir}/environments/{cluster}/{environment}")
 env_definition = getEnvDefinition(env_dir)
+ # New format: uses artifact definitions (V2-aware)
 if 'artifact' in env_definition.get('envTemplate', {}):
 logger.info("Use template downloading new logic")
 return download_artifact_new_logic(env_definition)

From b1aa3f87a80a6137d68013099b389f2a9d1c7fc4 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Thu, 29 Jan 2026 21:01:14 +0530
Subject: [PATCH 32/48] fix: nexus v2 issues

---
 .../artifact_searcher/artifact.py | 199 +++++++++++++-----
 .../artifact_searcher/cloud_auth_helper.py | 122 +++++++++--
 2 files changed, 245 insertions(+), 76 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index ca653cdea..b2fecf2b7 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -143,11 +143,11 @@ def _parse_snapshot_version(
 f"[Task {task_id}] [Application: {app.name}: {version}] - Resolved snapshot version '{resolved}' from old-style metadata")
 return resolved
 
- logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - <snapshot> found but missing timestamp or buildNumber")
- return None
+ logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - <snapshot> found but missing timestamp or buildNumber, will try direct SNAPSHOT filename")
+ return version
 
- logger.warning(f"[Application: {app.name}: {version}] - No <snapshotVersions> or <snapshot> found in metadata")
- return None
+ logger.warning(f"[Application: {app.name}: {version}] - No <snapshotVersions> or <snapshot> found in metadata, will try direct SNAPSHOT filename")
+ return version
 
 
 def version_to_folder_name(version: str) -> str:
@@ -261,6 +261,15 @@ async def check_artifact_by_full_url_async(
 return full_url, repo
 logger.warning(
 f"[Task {task_id}] [Application: {app.name}: {version}] - Artifact 
not found at URL {full_url}, status: {response.status}") + + # Fallback: Try direct -SNAPSHOT filename if resolved version failed (Nexus compatibility) + if version.endswith("-SNAPSHOT") and resolved_version != version: + fallback_url = create_full_url(app, version, repo_value, artifact_extension, classifier) + async with session.head(fallback_url) as fallback_response: + if fallback_response.status == 200: + stop_artifact_event.set() + logger.info(f"[Task {task_id}] [Application: {app.name}: {version}] - Artifact found with direct SNAPSHOT fallback: {fallback_url}") + return fallback_url, repo except Exception as e: logger.warning( f"[Task {task_id}] [Application: {app.name}: {version}] - Error checking artifact URL {full_url}: {e}") @@ -292,6 +301,7 @@ async def _attempt_check( classifier: str = "" ) -> Optional[tuple[str, tuple[str, str]]]: repos_dict = get_repo_value_pointer_dict(app.registry) + original_domain = app.registry.maven_config.repository_domain_name if registry_url: app.registry.maven_config.repository_domain_name = registry_url @@ -299,29 +309,34 @@ async def _attempt_check( timeout = aiohttp.ClientTimeout(total=DEFAULT_REQUEST_TIMEOUT) stop_snapshot_event_for_others = asyncio.Event() stop_artifact_event = asyncio.Event() - async with aiohttp.ClientSession(timeout=timeout, auth=auth) as session: - async with asyncio.TaskGroup() as tg: - tasks = [ - tg.create_task( - check_artifact_by_full_url_async( - app, - version, - repo, - artifact_extension, - stop_snapshot_event_for_others, - stop_artifact_event, - session, - i, - classifier + try: + async with aiohttp.ClientSession(timeout=timeout, auth=auth) as session: + async with asyncio.TaskGroup() as tg: + tasks = [ + tg.create_task( + check_artifact_by_full_url_async( + app, + version, + repo, + artifact_extension, + stop_snapshot_event_for_others, + stop_artifact_event, + session, + i, + classifier + ) ) - ) - for i, repo in enumerate(repos_dict.items()) - ] + for i, repo in enumerate(repos_dict.items()) + ] - for task in tasks: - result = task.result() - if result is not None: - return result + for task in tasks: + result = task.result() + if result is not None: + return result + finally: + # Always restore original repository domain to avoid persisting browse-index URL + if registry_url: + app.registry.maven_config.repository_domain_name = original_domain async def check_artifact_async( @@ -351,17 +366,24 @@ async def check_artifact_async( async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]: - """Resolve and download artifacts using RegDef V2 cloud configuration. + """Search for and download artifacts using the V2 cloud registry approach. - Uses CloudAuthHelper and the shared Maven client to search and download - artifacts from cloud-backed registries (AWS, GCP, Artifactory, Nexus). - Falls back to the V1 HTTP-based logic when configuration or credentials - are missing, or when any unrecoverable error occurs. + This is the modern way to find artifacts in cloud registries like AWS CodeArtifact, + GCP Artifact Registry, Artifactory, and Nexus. It uses the shared MavenArtifactSearcher + library to talk to these different registry types in a unified way. + + If anything goes wrong (missing config, wrong credentials, search fails), we automatically + fall back to the older V1 method which tries direct HTTP URLs. 
+ + Returns: + URL to the artifact and download location info, or None if not found """ + # V2 requires authConfig to know how to authenticate with the registry if not getattr(app.registry.maven_config, 'auth_config', None): logger.error(f"V2 fallback for '{app.name}': Registry '{app.registry.name}' version 2.0 missing maven_config.authConfig") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") + # Check if required libraries are available (they're optional dependencies) if CloudAuthHelper is None or MavenArtifact is None: missing = [] if CloudAuthHelper is None: @@ -371,13 +393,15 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.error(f"V2 fallback for '{app.name}': Missing required libraries - {', '.join(missing)}") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") + # Get authentication settings from the registry definition auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") if not auth_config: logger.error(f"V2 fallback for '{app.name}': Could not resolve authConfig for registry '{app.registry.name}'") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - - # AWS and GCP require credentials; Artifactory/Nexus can work with anonymous access + # Some cloud providers need credentials, others allow anonymous access + # AWS and GCP: must have credentials (their APIs don't allow anonymous) + # Artifactory/Nexus: can work without credentials if repository allows public read if auth_config.provider in ["aws", "gcp"]: if not env_creds: logger.error(f"V2 fallback for '{app.name}': {auth_config.provider} requires credentials but env_creds is empty") @@ -390,6 +414,60 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.info(f"V2 search for {app.name} with provider={auth_config.provider}") loop = asyncio.get_running_loop() + # Handle SNAPSHOT versions (development/pre-release builds) + # SNAPSHOTs work differently: we search using the base version like "1.0-SNAPSHOT" + # but download using the actual timestamped version like "1.0-20260129.071325-2" + # This is because Nexus/Artifactory index by base version for searching + resolved_version = version # This will become the timestamped version if it's a SNAPSHOT + search_version = version # Always use base version for searching + + if version.endswith("-SNAPSHOT"): + logger.info(f"Resolving SNAPSHOT version for verification: {app.artifact_id}:{version}") + + # Need credentials to fetch maven-metadata.xml (even if repository allows anonymous download) + cred = None + if auth_config.credentials_id and env_creds: + cred_data = env_creds.get(auth_config.credentials_id) + if cred_data and cred_data.get('username'): + from artifact_searcher.utils.models import Credentials + cred = Credentials(username=cred_data['username'], password=cred_data['password']) + + auth = BasicAuth(login=cred.username, password=cred.password) if cred else None + timeout = aiohttp.ClientTimeout(total=DEFAULT_REQUEST_TIMEOUT) + + # Try to resolve the SNAPSHOT to its actual timestamped version + # We check multiple repositories in order (snapshot repo, then public/group repos) + async with aiohttp.ClientSession(timeout=timeout, auth=auth) as session: + repos_dict = get_repo_value_pointer_dict(app.registry) + + # Loop through configured repositories until we find maven-metadata.xml + for repo_value, repo_pointer in repos_dict.items(): + if not 
repo_value: + continue + + try: + # This fetches maven-metadata.xml and extracts the timestamped version + result = await resolve_snapshot_version_async( + session, app, version, repo_value, 0, + asyncio.Event(), asyncio.Event(), + artifact_extension, classifier="" + ) + + if result: + resolved_version = result[0] + logger.info(f"V2 resolved SNAPSHOT: {version} -> {resolved_version}") + break + except Exception as e: + logger.debug(f"Failed to resolve SNAPSHOT from {repo_pointer}: {e}") + continue + + # If we couldn't resolve the SNAPSHOT version, fall back to V1 + if resolved_version == version: + logger.warning(f"Could not resolve SNAPSHOT, falling back to V1") + return await _check_artifact_v1_async(app, artifact_extension, version, cred=cred, classifier="") + + # Create the searcher object that knows how to talk to this specific registry type + # This handles AWS, GCP, Artifactory, and Nexus in a unified way try: searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) except KeyError as e: @@ -402,29 +480,38 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.error(f"V2 fallback for '{app.name}': Failed to create searcher - {e}", exc_info=True) return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - artifact_string = f"{app.group_id}:{app.artifact_id}:{version}" + # Build the artifact identifier for searching + # Important: We search with the base SNAPSHOT version (e.g., "1.0-SNAPSHOT") + # because that's how artifacts are indexed in Nexus/Artifactory search APIs + artifact_string = f"{app.group_id}:{app.artifact_id}:{search_version}" maven_artifact = MavenArtifact.from_string(artifact_string) maven_artifact.extension = artifact_extension.value + logger.info(f"V2 searching: {artifact_string}.{artifact_extension.value}") + if resolved_version != search_version: + logger.info(f"V2 resolved version for download: {resolved_version}") + max_retries = 2 last_error = None local_path = None maven_url = None + # Try up to 2 times in case of temporary network issues or expired credentials for attempt in range(max_retries): try: if attempt > 0: - logger.info(f"Retry {attempt} for {app.name} after 5s delay...") + # On retry, recreate the searcher in case credentials expired + logger.info(f"V2 retry {attempt} for {app.name} after 5s delay...") await asyncio.sleep(5) searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) - # Wrap find_artifact_urls with timeout to prevent indefinite hangs + # Search for the artifact with a timeout to avoid hanging forever urls = await asyncio.wait_for( loop.run_in_executor(None, partial(searcher.find_artifact_urls, artifact=maven_artifact)), timeout=V2_SEARCH_TIMEOUT ) if not urls: - logger.warning(f"No artifacts found for {app.artifact_id}:{version}") + logger.warning(f"V2 search returned no artifacts for {app.artifact_id}:{version}") return None maven_url = urls[0] @@ -453,31 +540,44 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt last_error = e error_str = str(e).lower() - # Log registry response if available (for HTTP errors) + # Special case: Nexus search API returns 404 when artifact isn't in the search index yet + # (it takes time for newly uploaded artifacts to appear in search) + if "404" in error_str and "search request" in error_str: + logger.info(f"V2 search index miss for {app.name} - artifact may not be indexed in Nexus search DB") 
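+                # (Nexus indexes asynchronously, but direct repository paths keep serving
+                # the file in the meantime, so the V1 HTTP probe below can still succeed)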
+ logger.info(f"Falling back to V1 direct HTTP lookup") + return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") + + # Try to extract useful HTTP error details for debugging if hasattr(e, 'response'): try: - response_text = e.response.text[:500] if hasattr(e.response, 'text') else str(e.response)[:500] - logger.error(f"Registry response for {app.name}: HTTP {getattr(e.response, 'status_code', 'N/A')}") - logger.error(f"Response body (first 500 chars): {response_text}") + status = getattr(e.response, 'status_code', 'N/A') + url = getattr(e.response, 'url', 'N/A') + logger.error(f"V2 HTTP {status} from {url}") except Exception: pass + # Some errors are temporary and worth retrying: + # 401/unauthorized: credentials might need refresh + # timeout: network might be slow if attempt < max_retries - 1 and any(x in error_str for x in ["401", "unauthorized", "forbidden", "expired", "timeout"]): - logger.warning(f"V2 error for {app.name}: {e}, retrying...") + logger.warning(f"V2 transient error for {app.name}, retrying: {e}") continue - logger.error(f"V2 fallback for '{app.name}': Failed after {attempt + 1} attempt(s) - {e}") + logger.error(f"V2 error for '{app.name}': {e}") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") else: logger.error(f"V2 fallback for '{app.name}': All {max_retries} attempts exhausted - {last_error}") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") + # AWS CodeArtifact returns resource IDs instead of direct URLs + # We need to construct the full download URL ourselves if auth_config.provider == "aws": registry_domain = app.registry.maven_config.repository_domain_name folder_name = version_to_folder_name(version) repo_path = app.registry.maven_config.target_snapshot if folder_name.endswith("-SNAPSHOT") else app.registry.maven_config.target_release full_url = f"{registry_domain.rstrip('/')}/{repo_path.rstrip('/')}/{maven_url}" else: + # Other providers (GCP, Artifactory, Nexus) return ready-to-use URLs full_url = maven_url return full_url, ("v2_downloaded", local_path) @@ -541,19 +641,8 @@ async def _check_artifact_v1_async( if result is not None: return result - if not app.registry.maven_config.is_nexus: - return result - - original_domain = app.registry.maven_config.repository_domain_name - fixed_domain = convert_nexus_repo_url_to_index_view(original_domain) - if fixed_domain != original_domain: - logger.info(f"Retrying artifact check with edited domain: {fixed_domain}") - result = await _attempt_check(app, version, artifact_extension, fixed_domain, cred, classifier) - if result is not None: - return result - else: - logger.debug("Domain is same after editing, skipping retry") - + # Browse URL retry removed - browse endpoints don't support downloads + # V1 fallback will rely on V2 search or direct repository URLs only logger.warning("Artifact not found") return None diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index 0e13ffe3b..28cef0b45 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -24,11 +24,27 @@ class CloudAuthHelper: - """Helper for V2 cloud registry authentication (AWS, GCP, Artifactory, Nexus).""" + """Helper to connect to cloud registries using V2 authentication. 
+ + This class knows how to authenticate with different types of artifact registries: + - AWS CodeArtifact (uses AWS access keys) + - GCP Artifact Registry (uses service account JSON) + - Artifactory (uses username/password) + - Nexus (uses username/password or anonymous access) + + It creates a MavenArtifactSearcher configured for each specific provider. + """ @staticmethod def resolve_auth_config(registry: Registry, artifact_type: str = "maven") -> Optional[AuthConfig]: - """Resolve authConfig from registry maven config reference.""" + """Find the authentication settings for this registry. + + Each registry can have multiple authConfig entries (for different artifact types). + This looks up which authConfig to use based on what the maven_config references. + + Returns: + AuthConfig object with provider and credentials info, or None if not configured + """ if artifact_type != "maven": return None @@ -50,11 +66,17 @@ def resolve_auth_config(registry: Registry, artifact_type: str = "maven") -> Opt @staticmethod def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, dict]]) -> Optional[dict]: - """Resolve credentials from env_creds based on auth_config.credentials_id. + """Get the actual username/password or secrets from the credentials vault. + + The authConfig tells us the credentials ID to look up. This function finds + those credentials in the environment's credential store and extracts them. + + Special handling: + - Empty username/password = anonymous access (returns None) + - Different credential types: usernamePassword, secret (for GCP service accounts) Returns: - dict: Credential data if found and non-anonymous - None: For anonymous access (no credentialsId or empty username/password) + dict with username/password or secret, or None for anonymous access """ cred_id = auth_config.credentials_id if not cred_id: @@ -66,11 +88,12 @@ def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, d cred_entry = env_creds[cred_id] - # Extract credential data from the new structure: {"type": "...", "data": {...}} + # Credentials can be structured as {"type": "usernamePassword", "data": {"username": "..."}} + # or as a flat dict {"username": "...", "password": "..."} cred_type = cred_entry.get("type") if isinstance(cred_entry, dict) else None cred_data = cred_entry.get("data", cred_entry) if isinstance(cred_entry, dict) else cred_entry - # Check for anonymous access (empty username/password for usernamePassword type) + # For Nexus/Artifactory: empty username+password means anonymous/public access if cred_type == "usernamePassword": username = cred_data.get("username", "") password = cred_data.get("password", "") @@ -91,10 +114,12 @@ def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, d logger.info(f"Resolved credentials for '{cred_id}' (type: {cred_type})") - # Validate required fields per provider + # Make sure we have the right credential format for each cloud provider + # AWS needs username (access key ID) and password (secret access key) if auth_config.provider == "aws": if "username" not in creds or "password" not in creds: raise ValueError(f"AWS credentials must have 'username' and 'password'") + # GCP needs a service account JSON file (stored as 'secret') elif auth_config.provider == "gcp" and auth_config.auth_method == "service_account": if "secret" not in creds: raise ValueError(f"GCP service_account credentials must have 'secret'") @@ -103,7 +128,13 @@ def resolve_credentials(auth_config: AuthConfig, env_creds: 
Optional[Dict[str, d

     @staticmethod
     def _extract_repository_name(url: str) -> str:
-        """Extract repository name from registry URL."""
+        """Extract the repository name from a registry URL.
+
+        Different providers have different URL patterns:
+        - AWS: https://domain.d.codeartifact.region.amazonaws.com/maven/repo-name/
+        - GCP: https://region-maven.pkg.dev/project/repo-name/
+        - Others: uses last path segment
+        """
         url = url.rstrip("/")
         # AWS CodeArtifact: .../maven/<repo>/...
         if "codeartifact" in url and "/maven/" in url:
             parts = url.split("/maven/")
             if len(parts) > 1:
                 return parts[1].split("/")[0]
         # GCP Artifact Registry: https://<region>-maven.pkg.dev/<project>/<repository>/
         if "pkg.dev" in url:
             parts = url.split("/")
             if len(parts) >= 5:
                 return parts[4]
         return url.split("/")[-1]

     @staticmethod
     def _extract_region(url: str, auth_config: AuthConfig) -> str:
-        """Extract region from URL or auth_config. Prefers explicit config over URL extraction."""
+        """Figure out which cloud region to use.
+
+        Tries these sources in order:
+        1. Explicit region in authConfig (if configured)
+        2. Extract from URL pattern (e.g., 'us-west-2.amazonaws.com')
+        3. Default to 'us-east-1' if can't determine
+        """
         if auth_config.provider == "aws" and auth_config.aws_region:
             return auth_config.aws_region
         aws_match = re.search(r'\.([a-z0-9-]+)\.amazonaws\.com', url)
         if aws_match:
             return aws_match.group(1)
         gcp_match = re.search(r'([a-z0-9-]+)-maven\.pkg\.dev', url)
         if gcp_match:
             return gcp_match.group(1)
         logger.warning(f"Could not extract region from URL '{url}', using default 'us-east-1'")
         return "us-east-1"

     @staticmethod
     def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict]]) -> 'MavenArtifactSearcher':
-        """Create configured MavenArtifactSearcher for the registry provider.
+        """Create a searcher object that knows how to find artifacts in this registry.
+
+        This is the main entry point for V2 artifact searching. It:
+        1. Reads the registry configuration to determine the provider type
+        2. Loads the appropriate credentials from the vault
+        3. Creates and configures a MavenArtifactSearcher for that specific provider

-        Provider auto-detection: If auth_config.provider is not specified, it will be
-        auto-detected from the registry URL.
+        Returns:
+            Configured MavenArtifactSearcher ready to search for and download artifacts
         """
         if MavenArtifactSearcher is None:
             raise ImportError("qubership_pipelines_common_library not available")
@@ -157,11 +199,22 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict
         if provider not in ["aws", "gcp", "artifactory", "nexus"]:
             raise ValueError(f"Unsupported provider: {provider}")

-        # Resolve credentials (returns None for anonymous access)
+        # Nexus URLs need special handling:
+        # Download URLs use: http://nexus/repository/repo-name/...
+        # Search API uses: http://nexus/service/rest/v1/search/... 
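+        # (e.g. a hypothetical "https://nexus.example.com/repository/" base serves downloads,
+        # while its search endpoint lives at "https://nexus.example.com/service/rest/v1/search")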
+ # So we need to remove the /repository/ suffix before initializing the searcher + if provider == "nexus" and registry_url.endswith("/repository/"): + registry_url = registry_url[:-len("repository/")] + logger.info(f"Nexus: adjusted registry URL to {registry_url} for search API") + + # Get the credentials (or None if anonymous access is allowed) creds = CloudAuthHelper.resolve_credentials(auth_config, env_creds) + + # Create the base searcher object - provider-specific config comes next searcher = MavenArtifactSearcher(registry_url, params={"timeout": DEFAULT_SEARCHER_TIMEOUT}) - # AWS and GCP require credentials - cannot work anonymously + # Check if anonymous access is allowed for this provider type + # AWS and GCP APIs require authentication - they don't support anonymous access if provider in ["aws", "gcp"] and creds is None: raise ValueError(f"{provider.upper()} requires credentials - anonymous access not supported") @@ -172,12 +225,20 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict elif provider == "artifactory": return CloudAuthHelper._configure_artifactory(searcher, creds) else: # nexus - return CloudAuthHelper._configure_nexus(searcher, creds) + return CloudAuthHelper._configure_nexus(searcher, creds, registry) @staticmethod def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, creds: dict, registry_url: str) -> 'MavenArtifactSearcher': - """Configure MavenArtifactSearcher for AWS CodeArtifact using username/password creds.""" + """Set up the searcher to work with AWS CodeArtifact. + + AWS needs: + - Access key (stored as username) + - Secret key (stored as password) + - Domain name (from authConfig) + - Region (from authConfig or URL) + - Repository name (from URL) + """ if not auth_config.aws_domain: raise ValueError("AWS auth requires aws_domain in authConfig") region = CloudAuthHelper._extract_region(registry_url, auth_config) @@ -194,7 +255,14 @@ def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, @staticmethod def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, creds: dict, registry_url: str) -> 'MavenArtifactSearcher': - """Configure MavenArtifactSearcher for GCP Artifact Registry using service account JSON.""" + """Set up the searcher to work with GCP Artifact Registry. + + GCP needs: + - Service account JSON (stored as secret) + - Project name (from authConfig or URL) + - Region (from URL, like 'us-central1') + - Repository name (from URL) + """ if auth_config.auth_method != "service_account": raise ValueError(f"GCP auth_method '{auth_config.auth_method}' not supported") if not auth_config.gcp_reg_project: @@ -215,7 +283,11 @@ def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, @staticmethod def _configure_artifactory(searcher: 'MavenArtifactSearcher', creds: Optional[dict]) -> 'MavenArtifactSearcher': - """Configure Artifactory authentication. Supports anonymous access if creds is None.""" + """Set up the searcher to work with Artifactory. + + Artifactory is simpler - just username and password. + Can work anonymously if the repository allows public access. 
+        """
         if creds is None:
             logger.info("Configuring Artifactory with anonymous access (no credentials)")
             return searcher.with_artifactory(username=None, password=None)
@@ -226,8 +298,16 @@ def _configure_artifactory(searcher: 'MavenArtifactSearcher', creds: Optional[di
         )

     @staticmethod
-    def _configure_nexus(searcher: 'MavenArtifactSearcher', creds: Optional[dict]) -> 'MavenArtifactSearcher':
-        """Configure Nexus authentication. Supports anonymous access if creds is None."""
+    def _configure_nexus(searcher: 'MavenArtifactSearcher', creds: Optional[dict], registry: Registry) -> 'MavenArtifactSearcher':
+        """Set up the searcher to work with Nexus Repository Manager.
+
+        Nexus is simple - just username and password, or anonymous if allowed.
+
+        Important: The MavenArtifactSearcher library searches across ALL repositories
+        in Nexus (we can't limit to a specific repository). This is a library limitation,
+        not a bug in our code. Nexus will return results from any repository the user
+        has access to.
+        """
         if creds is None:
             logger.info("Configuring Nexus with anonymous access (no credentials)")
             return searcher.with_nexus(username=None, password=None)

From 09142f36bffde039ecaed83e084473fda9c48d87 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Thu, 29 Jan 2026 21:36:44 +0530
Subject: [PATCH 33/48] fix: test case failure

---
 python/artifact-searcher/artifact_searcher/artifact.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index b2fecf2b7..2ff9028ee 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -143,11 +143,11 @@ def _parse_snapshot_version(
                 f"[Task {task_id}] [Application: {app.name}: {version}] - Resolved snapshot version '{resolved}' from old-style metadata")
             return resolved

-        logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - <snapshot> found but missing timestamp or buildNumber, will try direct SNAPSHOT filename")
-        return version
+        logger.warning(f"[Task {task_id}] [Application: {app.name}: {version}] - <snapshot> found but missing timestamp or buildNumber")
+        return None

-    logger.warning(f"[Application: {app.name}: {version}] - No <snapshotVersions> or <snapshot> found in metadata, will try direct SNAPSHOT filename")
-    return version
+    logger.warning(f"[Application: {app.name}: {version}] - No <snapshotVersions> or <snapshot> found in metadata")
+    return None


 def version_to_folder_name(version: str) -> str:

From 249c520a239b7da0623dc655145f2989e0a0be6f Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Thu, 29 Jan 2026 21:51:49 +0530
Subject: [PATCH 34/48] fix: test case failure 2

---
 python/artifact-searcher/artifact_searcher/artifact.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index 2ff9028ee..e6e44091d 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -54,6 +54,8 @@ def convert_nexus_repo_url_to_index_view(url: str) -> str:

 def create_artifact_path(app: Application, version: str, repo: str) -> str:
     registry_url = app.registry.maven_config.repository_domain_name.rstrip("/") + "/"
+    if app.registry.maven_config.is_nexus:
+        registry_url = convert_nexus_repo_url_to_index_view(registry_url)
     group_id = app.group_id.replace(".", "/")
     folder = version_to_folder_name(version)
     return urljoin(registry_url, 
f"{repo}/{group_id}/{app.artifact_id}/{folder}/") From bab882a181a635e96866271ad33c0be03f685ccc Mon Sep 17 00:00:00 2001 From: BackendBits Date: Fri, 30 Jan 2026 00:28:36 +0530 Subject: [PATCH 35/48] fix: resolve issues in test run after merge --- .../artifact_searcher/artifact.py | 11 +++++------ scripts/build_env/appregdef_render.py | 8 ++++---- scripts/build_env/handle_sd.py | 14 +++++++++++++- 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py index e6e44091d..3b99dec4e 100644 --- a/python/artifact-searcher/artifact_searcher/artifact.py +++ b/python/artifact-searcher/artifact_searcher/artifact.py @@ -54,8 +54,6 @@ def convert_nexus_repo_url_to_index_view(url: str) -> str: def create_artifact_path(app: Application, version: str, repo: str) -> str: registry_url = app.registry.maven_config.repository_domain_name.rstrip("/") + "/" - if app.registry.maven_config.is_nexus: - registry_url = convert_nexus_repo_url_to_index_view(registry_url) group_id = app.group_id.replace(".", "/") folder = version_to_folder_name(version) return urljoin(registry_url, f"{repo}/{group_id}/{app.artifact_id}/{folder}/") @@ -219,7 +217,8 @@ async def download_async(session, artifact_info: ArtifactInfo) -> ArtifactInfo: artifact_info.local_path = artifact_local_path return artifact_info else: - logger.error(f"Download process with error {response.text}: {url}") + error_text = await response.text() + logger.error(f"Download process with error (status {response.status}): {url} - {error_text}") except Exception as e: logger.error(f"Download process with exception {url}: {e}") @@ -628,7 +627,7 @@ async def _v2_download_with_fallback(searcher, url: str, local_path: str, auth_c logger.info(f"Direct HTTP download successful: {local_path}") return True except Exception as e: - logger.warning(f"Direct HTTP download failed: {e}") + logger.warning(f"Direct HTTP download failed for {url}: {e}", exc_info=True) return False @@ -659,7 +658,7 @@ def unzip_file(artifact_id: str, app_name: str, app_version: str, zip_url: str): zip_file.extract(file, create_app_artifacts_local_path(app_name, app_version)) extracted = True except Exception as e: - logger.error(f"Error unpacking {e}") + logger.error(f"Error unpacking artifact {artifact_id} for {app_name}:{app_version} from {zip_url}: {e}", exc_info=True) if not extracted: logger.warning(f"No files were extracted for application {app_name}:{app_version}") @@ -786,7 +785,7 @@ def resolve_snapshot_version(base_path, extension: FileExtension, cred: Credenti node_classifier = node.findtext("classifier", default="") node_extension = node.findtext("extension", default="") value = node.findtext("value") - if node_classifier == classifier and node_extension == extension: + if node_classifier == classifier and node_extension == extension.value: logger.info(f"Resolved snapshot version '{value}'") return value diff --git a/scripts/build_env/appregdef_render.py b/scripts/build_env/appregdef_render.py index 9cded545f..e43e4f4ef 100644 --- a/scripts/build_env/appregdef_render.py +++ b/scripts/build_env/appregdef_render.py @@ -36,9 +36,9 @@ src = Path(render_dir) / dir_name dst = Path(env_dir) / dir_name - if dst.exists(): - shutil.rmtree(dst) - if src.exists(): - shutil.move(src, dst) + if src.exists() and any(src.iterdir()): + dst.mkdir(parents=True, exist_ok=True) + for item in src.iterdir(): + shutil.copy2(item, dst / item.name) update_generated_versions(env_dir, 
BUILD_ENV_TAG, template_version)
\ No newline at end of file
diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py
index d290d4a25..bac4fecea 100644
--- a/scripts/build_env/handle_sd.py
+++ b/scripts/build_env/handle_sd.py
@@ -317,7 +317,19 @@ def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, en
         logger.debug(f"Reading V2 solution descriptor from local file: {mvn_repo_extra}")
         with open(mvn_repo_extra, 'r') as f:
             return json.load(f)
-    return artifact.download_json_content(sd_url)
+
+    # V1 fallback path or non-V2 registry - need credentials for HTTP download
+    cred = None
+    if app_def.registry.credentials_id and env_creds:
+        cred_data = env_creds.get(app_def.registry.credentials_id)
+        if cred_data and cred_data.get('username'):
+            cred = artifact_models.Credentials(
+                username=cred_data.get('username', ''),
+                password=cred_data.get('password', '')
+            )
+            logger.debug(f"Using credentials '{app_def.registry.credentials_id}' for SD download")
+
+    return artifact.download_json_content(sd_url, cred)


 def get_appdef_for_app(appver: str, app_name: str, plugins: PluginEngine) -> artifact_models.Application:

From 366b7fa0853d8b0bb0d15479b2734722edefba40 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Fri, 30 Jan 2026 00:38:48 +0530
Subject: [PATCH 36/48] fix: test for path

---
 python/artifact-searcher/artifact_searcher/test_artifact.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/python/artifact-searcher/artifact_searcher/test_artifact.py b/python/artifact-searcher/artifact_searcher/test_artifact.py
index 4c6870f41..0255334d9 100644
--- a/python/artifact-searcher/artifact_searcher/test_artifact.py
+++ b/python/artifact-searcher/artifact_searcher/test_artifact.py
@@ -17,7 +17,6 @@ def __init__(self, status_code):
     "index_path",
     [
         ("/repository/"),
-        ("/service/rest/repository/browse/"),
     ],
 )
 async def test_resolve_snapshot_version(aiohttp_server, index_path, monkeypatch):

From 39b0c270cd85069d7102a40978097b7b2a35b3cb Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Fri, 30 Jan 2026 09:53:51 +0530
Subject: [PATCH 37/48] fix: improve code, make it compliant with the schema,
 and include Artifactory and Nexus as supported V2 providers

---
 .../artifact_searcher/artifact.py          |   5 +-
 .../artifact_searcher/cloud_auth_helper.py | 101 ++++++++++++------
 schemas/artifact-definition-v2.schema.json |   4 +-
 schemas/regdef-v2.schema.json              |   4 +-
 4 files changed, 80 insertions(+), 34 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index 3b99dec4e..9ddbc96ad 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -400,9 +400,12 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt
         logger.error(f"V2 fallback for '{app.name}': Could not resolve authConfig for registry '{app.registry.name}'")
         return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="")

-    # Some cloud providers need credentials, others allow anonymous access
+    # Early validation: Some cloud providers need credentials, others allow anonymous access
     # AWS and GCP: must have credentials (their APIs don't allow anonymous)
     # Artifactory/Nexus: can work without credentials if repository allows public read
+    # Note: This check only works when provider is explicitly set in authConfig. 
+    # If provider is None (will be auto-detected later), this validation is skipped here
+    # and will be enforced in create_maven_searcher() after auto-detection.
     if auth_config.provider in ["aws", "gcp"]:
         if not env_creds:
             logger.error(f"V2 fallback for '{app.name}': {auth_config.provider} requires credentials but env_creds is empty")

diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
index 28cef0b45..b055eaf9c 100644
--- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
+++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
@@ -130,25 +130,19 @@ def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, d
     def _extract_repository_name(url: str) -> str:
         """Extract the repository name from a registry URL.

-        Different providers have different URL patterns:
-        - AWS: https://domain.d.codeartifact.region.amazonaws.com/maven/repo-name/
-        - GCP: https://region-maven.pkg.dev/project/repo-name/
-        - Others: uses last path segment
+        For CodeArtifact: https://domain-owner.d.codeartifact.region.amazonaws.com/maven/repo-name/
+        Returns 'repo-name'
+
+        For Artifact Registry: https://region-maven.pkg.dev/project/repo-name/
+        Returns 'repo-name'
         """
-        url = url.rstrip("/")
-        # AWS CodeArtifact: .../maven/<repo>/...
-        if "codeartifact" in url and "/maven/" in url:
-            parts = url.split("/maven/")
-            if len(parts) > 1:
-                return parts[1].split("/")[0]
-        # GCP Artifact Registry: https://<region>-maven.pkg.dev/<project>/<repository>/
-        if "pkg.dev" in url:
-            parts = url.split("/")
-            if len(parts) >= 5:
-                return parts[4]
-        return url.split("/")[-1]
+        parts = [p for p in url.rstrip('/').split('/') if p]
+        if parts:
+            repo_name = parts[-1]
+            logger.debug(f"Extracted repository name: {repo_name} from URL: {url}")
+            return repo_name
+        raise ValueError(f"Could not extract repository name from URL: {url}")

-    @staticmethod
     @staticmethod
     def _extract_region(url: str, auth_config: AuthConfig) -> str:
         """Figure out which cloud region to use.

         Tries these sources in order:
         1. Explicit region in authConfig (if configured)
         2. Extract from URL pattern (e.g., 'us-west-2.amazonaws.com')
         3. Default to 'us-east-1' if can't determine
         """
         if auth_config.provider == "aws" and auth_config.aws_region:
+            logger.debug(f"Using explicit AWS region: {auth_config.aws_region}")
             return auth_config.aws_region
         aws_match = re.search(r'\.([a-z0-9-]+)\.amazonaws\.com', url)
         if aws_match:
-            return aws_match.group(1)
-            gcp_match = re.search(r'([a-z0-9-]+)-maven\.pkg\.dev', url)
-            if gcp_match:
-                return gcp_match.group(1)
-        logger.warning(f"Could not extract region from URL '{url}', using default 'us-east-1'")
+            region = aws_match.group(1)
+            logger.debug(f"Extracted AWS region from URL: {region}")
+            return region
+        logger.debug("AWS region not found in URL, defaulting to us-east-1")
         return "us-east-1"

+    @staticmethod
+    def _detect_provider(url: str, auth_config: AuthConfig) -> Optional[str]:
+        """Auto-detect provider from URL patterns for on-premise registries only.
+
+        Auto-detection is ONLY for Nexus and Artifactory (on-premise registries).
+        AWS and GCP (cloud registries) require explicit provider specification. 
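+        For illustration, a hypothetical "https://repo.example.com/artifactory/libs-release"
+        resolves to "artifactory", and any URL containing "nexus" or "/service/rest/" to "nexus".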
+ + Returns: + Provider name or None if cannot be detected + """ + # If provider is explicitly set, use it + if auth_config.provider: + logger.debug(f"Using explicit provider: {auth_config.provider}") + return auth_config.provider + + url_lower = url.lower() + + # Auto-detect ONLY for on-premise registries (Nexus and Artifactory) + # AWS and GCP must be explicitly specified + + # Artifactory patterns + if "artifactory" in url_lower or "/artifactory/" in url_lower: + logger.info(f"Auto-detected provider: artifactory from URL pattern") + return "artifactory" + + # Nexus patterns + if "nexus" in url_lower or "/nexus/" in url_lower or "/service/rest/" in url_lower: + logger.info(f"Auto-detected provider: nexus from URL pattern") + return "nexus" + + # AWS and GCP require explicit provider - no auto-detection + logger.warning(f"Could not auto-detect provider from URL: {url}. AWS and GCP require explicit provider specification.") + return None + @staticmethod def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict]]) -> 'MavenArtifactSearcher': """Create a searcher object that knows how to find artifacts in this registry. @@ -190,11 +218,13 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict registry_url = registry.maven_config.repository_domain_name - # Provider is required in RegDef v2 - provider = auth_config.provider + # Try to detect provider if not explicitly set + # Auto-detection works for Nexus and Artifactory (on-premise registries) + # AWS and GCP must be explicitly specified + provider = CloudAuthHelper._detect_provider(registry_url, auth_config) if not provider: - logger.error(f"V2 fallback: provider field is required in authConfig for registry '{registry.name}'") - raise ValueError(f"Provider field is required in authConfig for registry '{registry.name}'") + logger.error(f"V2 fallback: Could not determine provider for registry '{registry.name}'. 
Please specify provider in authConfig or use recognizable URL pattern (nexus/artifactory)")
-            raise ValueError(f"Provider field is required in authConfig for registry '{registry.name}'")
+            raise ValueError(f"Could not determine provider for registry '{registry.name}'")

         if provider not in ["aws", "gcp", "artifactory", "nexus"]:
             raise ValueError(f"Unsupported provider: {provider}")
@@ -259,24 +289,33 @@ def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig,

         GCP needs:
         - Service account JSON (stored as secret)
-        - Project name (from authConfig or URL)
+        - Project name (from authConfig or extracted from URL)
         - Region (from URL, like 'us-central1')
         - Repository name (from URL)
         """
         if auth_config.auth_method != "service_account":
             raise ValueError(f"GCP auth_method '{auth_config.auth_method}' not supported")
-        if not auth_config.gcp_reg_project:
-            raise ValueError("GCP auth requires gcp_reg_project in authConfig")
+
+        # Extract project from authConfig or URL
+        project = auth_config.gcp_reg_project
+        if not project:
+            # Extract from GCP URL pattern: https://<region>-maven.pkg.dev/<project>/<repo>/
+            match = re.search(r'pkg\.dev/([^/]+)/', registry_url)
+            if match:
+                project = match.group(1)
+                logger.info(f"Extracted GCP project from URL: {project}")
+            else:
+                raise ValueError("GCP auth requires gcp_reg_project in authConfig or valid GCP URL format (https://<region>-maven.pkg.dev/<project>/<repo>/)")

         sa_data = creds["secret"]
         sa_json = json.dumps(sa_data) if isinstance(sa_data, dict) else sa_data
         region = CloudAuthHelper._extract_region(registry_url, auth_config)
         repo_name = CloudAuthHelper._extract_repository_name(registry_url)
-        logger.info(f"Configuring GCP Artifact Registry: project={auth_config.gcp_reg_project}, region={region}")
+        logger.info(f"Configuring GCP Artifact Registry: project={project}, region={region}")
         return searcher.with_gcp_artifact_registry(
             credential_params={"service_account_key": sa_json},
-            project=auth_config.gcp_reg_project,
+            project=project,
             region_name=region,
             repository=repo_name
         )
diff --git a/schemas/artifact-definition-v2.schema.json b/schemas/artifact-definition-v2.schema.json
index dde0fd065..9f154cae9 100644
--- a/schemas/artifact-definition-v2.schema.json
+++ b/schemas/artifact-definition-v2.schema.json
@@ -109,7 +109,9 @@
           "enum": [
             "aws",
             "azure",
-            "gcp"
+            "gcp",
+            "artifactory",
+            "nexus"
           ],
           "title": "Provider",
           "description": "Public cloud registry type. Used in case of public cloud registries"
diff --git a/schemas/regdef-v2.schema.json b/schemas/regdef-v2.schema.json
index 7f1a30249..5b1b3d8f7 100644
--- a/schemas/regdef-v2.schema.json
+++ b/schemas/regdef-v2.schema.json
@@ -66,7 +66,9 @@
             "enum": [
               "aws",
               "azure",
-              "gcp"
+              "gcp",
+              "nexus",
+              "artifactory"
             ]
           },
           "authMethod": {

From 4b62b34bf932b9d866fae76a94da79a9fc8fb7a5 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Fri, 30 Jan 2026 11:40:08 +0530
Subject: [PATCH 38/48] fix: GCP project extraction regex to handle URLs
 with/without trailing slash

---
 .../artifact_searcher/cloud_auth_helper.py | 30 +++++++++++++++----
 1 file changed, 24 insertions(+), 6 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
index b055eaf9c..518336ed3 100644
--- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
+++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py
@@ -145,7 +145,7 @@ def _extract_region(url: str, auth_config: AuthConfig) -> str:

     @staticmethod
     def _extract_region(url: str, auth_config: AuthConfig) -> str:
-        """Figure out which cloud region to use.
+        """Figure out which AWS region to use. 
Tries these sources in order:
         1. Explicit region in authConfig (if configured)
         2. Extract from URL pattern (e.g., 'us-west-2.amazonaws.com')
         3. Default to 'us-east-1' if can't determine
         """
         if auth_config.provider == "aws" and auth_config.aws_region:
             logger.debug(f"Using explicit AWS region: {auth_config.aws_region}")
             return auth_config.aws_region
         aws_match = re.search(r'\.([a-z0-9-]+)\.amazonaws\.com', url)
         if aws_match:
             region = aws_match.group(1)
             logger.debug(f"Extracted AWS region from URL: {region}")
             return region
         logger.debug("AWS region not found in URL, defaulting to us-east-1")
         return "us-east-1"

+    @staticmethod
+    def _extract_gcp_region(url: str) -> str:
+        """Extract GCP region from Artifact Registry URL.
+
+        GCP Artifact Registry URL format: https://<region>-maven.pkg.dev/...
+        Example: https://us-east1-maven.pkg.dev/... -> us-east1
+
+        Note: GCP regions use format like 'us-east1', NOT 'us-east-1'
+        """
+        match = re.search(r'https://([a-z0-9-]+)-maven\.pkg\.dev', url)
+        if match:
+            region = match.group(1)
+            logger.debug(f"Extracted GCP region from URL: {region}")
+            return region
+        logger.warning(f"Could not extract GCP region from URL: {url}, defaulting to us-central1")
+        return "us-central1"
+
     @staticmethod
     def _detect_provider(url: str, auth_config: AuthConfig) -> Optional[str]:
@@ -270,7 +287,7 @@ def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig,
         if not auth_config.aws_domain:
-            raise ValueError("AWS auth requires aws_domain in authConfig")
+            raise ValueError("AWS auth requires awsDomain in authConfig")
         region = CloudAuthHelper._extract_region(registry_url, auth_config)
         repo_name = CloudAuthHelper._extract_repository_name(registry_url)
         logger.info(f"Configuring AWS CodeArtifact: domain={auth_config.aws_domain}, region={region}")
@@ -299,17 +316,18 @@ def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig,

         # Extract project from authConfig or URL
         project = auth_config.gcp_reg_project
         if not project:
-            # Extract from GCP URL pattern: https://<region>-maven.pkg.dev/<project>/<repo>/
-            match = re.search(r'pkg\.dev/([^/]+)/', registry_url)
+            # Extract from GCP URL pattern: https://<region>-maven.pkg.dev/<project>/<repo>
+            # The project is the first path segment after pkg.dev/
+            match = re.search(r'pkg\.dev/([^/]+)', registry_url)
             if match:
                 project = match.group(1)
                 logger.info(f"Extracted GCP project from URL: {project}")
             else:
-                raise ValueError("GCP auth requires gcp_reg_project in authConfig or valid GCP URL format (https://<region>-maven.pkg.dev/<project>/<repo>/)")
+                raise ValueError("GCP auth requires gcpRegProject in authConfig or valid GCP URL format (https://<region>-maven.pkg.dev/<project>/<repo>)")

         sa_data = creds["secret"]
         sa_json = json.dumps(sa_data) if isinstance(sa_data, dict) else sa_data
-        region = CloudAuthHelper._extract_region(registry_url, auth_config)
+        region = CloudAuthHelper._extract_gcp_region(registry_url)
         repo_name = CloudAuthHelper._extract_repository_name(registry_url)
         logger.info(f"Configuring GCP Artifact Registry: project={project}, region={region}")

From bfed5ae49265bf5486e81c66093ec81dee73df40 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Sun, 1 Feb 2026 23:06:22 +0530
Subject: [PATCH 39/48] Fix: Add Ability to Download Multiple SDs

---
 scripts/build_env/handle_sd.py | 77 ++++++++++++++++++++++++++++------
 1 file changed, 65 insertions(+), 12 deletions(-)

diff --git a/scripts/build_env/handle_sd.py b/scripts/build_env/handle_sd.py
index bac4fecea..d154164f8 100644
--- a/scripts/build_env/handle_sd.py
+++ b/scripts/build_env/handle_sd.py
@@ -284,29 +284,63 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
         exit(1)

     app_def_getter_plugins = PluginEngine(plugins_dir='/module/scripts/handle_sd_plugins/app_def_getter')
-    sd_data_list = []
-    for entry in sd_entries:  # appvers
+
+    # Validate all entries before 
starting downloads + for entry in sd_entries: if ":" not in entry: logger.error(f"Invalid SD_VERSION format: '{entry}'. Expected 'name:version'") exit(1) - source_name, version = entry.split(":", 1) - logger.info(f"Starting download of SD: {source_name}-{version}") - - sd_data = download_sd_by_appver(source_name, version, app_def_getter_plugins, env) - - sd_data_list.append(sd_data) + # Download all SDs in parallel using asyncio + logger.info(f"Starting parallel download of {len(sd_entries)} SD(s)...") + sd_data_list = asyncio.run(_download_sds_parallel(sd_entries, app_def_getter_plugins, env)) + logger.info(f"Successfully downloaded {len(sd_data_list)} SD(s)") sd_data_json = json.dumps(sd_data_list) extract_sds_from_json(env, base_sd_path, sd_data_json, effective_merge_mode) -def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, env: Environment = None) -> dict[str, object]: +async def _download_sds_parallel(sd_entries: list[str], plugins: PluginEngine, env: Environment) -> list[dict]: + """Download multiple SDs in parallel while preserving order. + + Args: + sd_entries: List of "name:version" entries + plugins: Plugin engine for app def resolution + env: Environment object + + Returns: + List of SD data dictionaries in the same order as sd_entries + """ + tasks = [] + for entry in sd_entries: + source_name, version = entry.split(":", 1) + logger.info(f"Queuing download of SD: {source_name}-{version}") + task = _download_sd_by_appver_async(source_name, version, plugins, env) + tasks.append(task) + + # asyncio.gather preserves order of results matching order of tasks + sd_data_list = await asyncio.gather(*tasks) + return sd_data_list + + +async def _download_sd_by_appver_async(app_name: str, version: str, plugins: PluginEngine, env: Environment = None) -> dict[str, object]: + """Async version of SD download by app name and version. 
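+    Note: only the artifact check is awaited; app-def resolution and credential
+    loading stay synchronous, so gather() overlaps mainly the network wait.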
+
+    Args:
+        app_name: Application name
+        version: Application version
+        plugins: Plugin engine for app def resolution
+        env: Environment object
+
+    Returns:
+        SD data dictionary
+    """
+    logger.info(f"Starting download of SD: {app_name}-{version}")
     app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins)

     # Use existing get_cred_config() utility for credentials
     env_creds = get_cred_config()
-    artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds=env_creds))
+    artifact_info = await artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds=env_creds)
     if not artifact_info:
         raise ValueError(
             f'Solution descriptor content was not received for {app_name}:{version}')
@@ -316,7 +350,9 @@
     if mvn_repo_value == "v2_downloaded":
         logger.debug(f"Reading V2 solution descriptor from local file: {mvn_repo_extra}")
         with open(mvn_repo_extra, 'r') as f:
-            return json.load(f)
+            sd_data = json.load(f)
+        logger.info(f"Successfully downloaded SD: {app_name}-{version}")
+        return sd_data

     # V1 fallback path or non-V2 registry - need credentials for HTTP download
     cred = None
     if app_def.registry.credentials_id and env_creds:
         cred_data = env_creds.get(app_def.registry.credentials_id)
         if cred_data and cred_data.get('username'):
             cred = artifact_models.Credentials(
                 username=cred_data.get('username', ''),
                 password=cred_data.get('password', '')
             )
             logger.debug(f"Using credentials '{app_def.registry.credentials_id}' for SD download")

-    return artifact.download_json_content(sd_url, cred)
+    sd_data = artifact.download_json_content(sd_url, cred)
+    logger.info(f"Successfully downloaded SD: {app_name}-{version}")
+    return sd_data
+
+
+def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, env: Environment = None) -> dict[str, object]:
+    """Synchronous wrapper for backward compatibility (used by tests).
+
+    Args:
+        app_name: Application name
+        version: Application version
+        plugins: Plugin engine for app def resolution
+        env: Environment object
+
+    Returns:
+        SD data dictionary
+    """
+    return asyncio.run(_download_sd_by_appver_async(app_name, version, plugins, env))


 def get_appdef_for_app(appver: str, app_name: str, plugins: PluginEngine) -> artifact_models.Application:

From 2d714d59618c538433372f97ec1c4be9fa1f8163 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Mon, 2 Feb 2026 00:00:39 +0530
Subject: [PATCH 40/48] Fix: Address review comments

---
 .../artifact_searcher/artifact.py          |  77 +++++------
 .../artifact_searcher/cloud_auth_helper.py | 116 ++++--------------
 2 files changed, 50 insertions(+), 143 deletions(-)

diff --git a/python/artifact-searcher/artifact_searcher/artifact.py b/python/artifact-searcher/artifact_searcher/artifact.py
index 9ddbc96ad..de53299e4 100644
--- a/python/artifact-searcher/artifact_searcher/artifact.py
+++ b/python/artifact-searcher/artifact_searcher/artifact.py
@@ -45,7 +45,7 @@ def convert_nexus_repo_url_to_index_view(url: str) -> str:

     if not parts or parts[-1] != "repository":
         return url
-    
+    # Build new path
     new_parts = parts[:-1] + ["service", "rest", "repository", "browse"]
     new_path = "/".join(new_parts) + "/"
@@ -143,12 +153,16 @@ def version_to_folder_name(version: str) -> str:
     """
     Normalizes version string for folder naming.
+
     If version is timestamped snapshot (e.g. '1.0.0-20240702.123456-1'),
     it replaces the timestamp suffix with '-SNAPSHOT'. 
Otherwise, returns the version unchanged """ snapshot_pattern = re.compile(r"-\d{8}\.\d{6}-\d+$") - return snapshot_pattern.sub("-SNAPSHOT", version) if snapshot_pattern.search(version) else version - + if snapshot_pattern.search(version): + folder = snapshot_pattern.sub("-SNAPSHOT", version) + else: + folder = version + return folder def clean_temp_dir(): if WORKSPACE.exists(): @@ -367,24 +371,20 @@ async def check_artifact_async( async def _check_artifact_v2_async(app: Application, artifact_extension: FileExtension, version: str, env_creds: Optional[dict]) -> Optional[tuple[str, tuple[str, str]]]: - """Search for and download artifacts using the V2 cloud registry approach. - - This is the modern way to find artifacts in cloud registries like AWS CodeArtifact, - GCP Artifact Registry, Artifactory, and Nexus. It uses the shared MavenArtifactSearcher - library to talk to these different registry types in a unified way. + """Search for artifacts using V2 cloud registry approach. - If anything goes wrong (missing config, wrong credentials, search fails), we automatically - fall back to the older V1 method which tries direct HTTP URLs. + Supports AWS CodeArtifact, GCP Artifact Registry, Artifactory, and Nexus via + MavenArtifactSearcher library. Falls back to V1 on failure. Returns: - URL to the artifact and download location info, or None if not found + (artifact_url, download_info) tuple or None """ # V2 requires authConfig to know how to authenticate with the registry if not getattr(app.registry.maven_config, 'auth_config', None): logger.error(f"V2 fallback for '{app.name}': Registry '{app.registry.name}' version 2.0 missing maven_config.authConfig") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - # Check if required libraries are available (they're optional dependencies) + # Check optional V2 dependencies if CloudAuthHelper is None or MavenArtifact is None: missing = [] if CloudAuthHelper is None: @@ -394,18 +394,12 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.error(f"V2 fallback for '{app.name}': Missing required libraries - {', '.join(missing)}") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - # Get authentication settings from the registry definition auth_config = CloudAuthHelper.resolve_auth_config(app.registry, "maven") if not auth_config: logger.error(f"V2 fallback for '{app.name}': Could not resolve authConfig for registry '{app.registry.name}'") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - # Early validation: Some cloud providers need credentials, others allow anonymous access - # AWS and GCP: must have credentials (their APIs don't allow anonymous) - # Artifactory/Nexus: can work without credentials if repository allows public read - # Note: This check only works when provider is explicitly set in authConfig. - # If provider is None (will be auto-detected later), this validation is skipped here - # and will be enforced in create_maven_searcher() after auto-detection. 
+ # AWS/GCP require credentials; Artifactory/Nexus support anonymous if provider is set if auth_config.provider in ["aws", "gcp"]: if not env_creds: logger.error(f"V2 fallback for '{app.name}': {auth_config.provider} requires credentials but env_creds is empty") @@ -418,29 +412,24 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.info(f"V2 search for {app.name} with provider={auth_config.provider}") loop = asyncio.get_running_loop() - # Handle SNAPSHOT versions (development/pre-release builds) - # SNAPSHOTs work differently: we search using the base version like "1.0-SNAPSHOT" - # but download using the actual timestamped version like "1.0-20260129.071325-2" - # This is because Nexus/Artifactory index by base version for searching + # SNAPSHOT: search by base version, download by timestamped version resolved_version = version # This will become the timestamped version if it's a SNAPSHOT search_version = version # Always use base version for searching if version.endswith("-SNAPSHOT"): logger.info(f"Resolving SNAPSHOT version for verification: {app.artifact_id}:{version}") - # Need credentials to fetch maven-metadata.xml (even if repository allows anonymous download) + # Credentials needed for maven-metadata.xml fetch cred = None if auth_config.credentials_id and env_creds: cred_data = env_creds.get(auth_config.credentials_id) if cred_data and cred_data.get('username'): - from artifact_searcher.utils.models import Credentials cred = Credentials(username=cred_data['username'], password=cred_data['password']) auth = BasicAuth(login=cred.username, password=cred.password) if cred else None timeout = aiohttp.ClientTimeout(total=DEFAULT_REQUEST_TIMEOUT) - # Try to resolve the SNAPSHOT to its actual timestamped version - # We check multiple repositories in order (snapshot repo, then public/group repos) + # Resolve SNAPSHOT to timestamped version across repos async with aiohttp.ClientSession(timeout=timeout, auth=auth) as session: repos_dict = get_repo_value_pointer_dict(app.registry) @@ -450,7 +439,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt continue try: - # This fetches maven-metadata.xml and extracts the timestamped version + # Fetch maven-metadata.xml for timestamped version result = await resolve_snapshot_version_async( session, app, version, repo_value, 0, asyncio.Event(), asyncio.Event(), @@ -470,8 +459,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.warning(f"Could not resolve SNAPSHOT, falling back to V1") return await _check_artifact_v1_async(app, artifact_extension, version, cred=cred, classifier="") - # Create the searcher object that knows how to talk to this specific registry type - # This handles AWS, GCP, Artifactory, and Nexus in a unified way + # Create registry-specific searcher (AWS, GCP, Artifactory, Nexus) try: searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) except KeyError as e: @@ -484,9 +472,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.error(f"V2 fallback for '{app.name}': Failed to create searcher - {e}", exc_info=True) return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - # Build the artifact identifier for searching - # Important: We search with the base SNAPSHOT version (e.g., "1.0-SNAPSHOT") - # because that's how artifacts are indexed in Nexus/Artifactory search APIs + # Build artifact 
identifier (use base SNAPSHOT version for search APIs) artifact_string = f"{app.group_id}:{app.artifact_id}:{search_version}" maven_artifact = MavenArtifact.from_string(artifact_string) maven_artifact.extension = artifact_extension.value @@ -500,7 +486,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt local_path = None maven_url = None - # Try up to 2 times in case of temporary network issues or expired credentials + # Retry on transient errors (401, timeout, expired) for attempt in range(max_retries): try: if attempt > 0: @@ -509,7 +495,6 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt await asyncio.sleep(5) searcher = await loop.run_in_executor(None, CloudAuthHelper.create_maven_searcher, app.registry, env_creds) - # Search for the artifact with a timeout to avoid hanging forever urls = await asyncio.wait_for( loop.run_in_executor(None, partial(searcher.find_artifact_urls, artifact=maven_artifact)), timeout=V2_SEARCH_TIMEOUT @@ -544,8 +529,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt last_error = e error_str = str(e).lower() - # Special case: Nexus search API returns 404 when artifact isn't in the search index yet - # (it takes time for newly uploaded artifacts to appear in search) + # Nexus: 404 means artifact not yet indexed in search if "404" in error_str and "search request" in error_str: logger.info(f"V2 search index miss for {app.name} - artifact may not be indexed in Nexus search DB") logger.info(f"Falling back to V1 direct HTTP lookup") @@ -560,9 +544,7 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt except Exception: pass - # Some errors are temporary and worth retrying: - # 401/unauthorized: credentials might need refresh - # timeout: network might be slow + # Retry on transient errors (401, timeout, expired) if attempt < max_retries - 1 and any(x in error_str for x in ["401", "unauthorized", "forbidden", "expired", "timeout"]): logger.warning(f"V2 transient error for {app.name}, retrying: {e}") continue @@ -573,15 +555,14 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt logger.error(f"V2 fallback for '{app.name}': All {max_retries} attempts exhausted - {last_error}") return await _check_artifact_v1_async(app, artifact_extension, version, cred=None, classifier="") - # AWS CodeArtifact returns resource IDs instead of direct URLs - # We need to construct the full download URL ourselves + # AWS: construct full URL from resource ID if auth_config.provider == "aws": registry_domain = app.registry.maven_config.repository_domain_name folder_name = version_to_folder_name(version) repo_path = app.registry.maven_config.target_snapshot if folder_name.endswith("-SNAPSHOT") else app.registry.maven_config.target_release full_url = f"{registry_domain.rstrip('/')}/{repo_path.rstrip('/')}/{maven_url}" else: - # Other providers (GCP, Artifactory, Nexus) return ready-to-use URLs + # GCP/Artifactory/Nexus: URL ready-to-use full_url = maven_url return full_url, ("v2_downloaded", local_path) @@ -589,12 +570,10 @@ async def _check_artifact_v2_async(app: Application, artifact_extension: FileExt async def _v2_download_with_fallback(searcher, url: str, local_path: str, auth_config, registry: Registry, env_creds: Optional[dict]) -> bool: - """Download artifact using MavenArtifactSearcher with HTTP fallback. - - Attempts to download via the configured MavenArtifactSearcher first, - bounded by V2_DOWNLOAD_TIMEOUT. 
For supported providers (GCP, Artifactory, - Nexus), falls back to a direct HTTP GET when the searcher-based download - fails, optionally adding GCP access tokens to the request. + """Download artifact via searcher with HTTP fallback. + + Tries searcher download first, falls back to HTTP GET for GCP/Artifactory/Nexus. + Adds GCP access tokens if needed. """ loop = asyncio.get_running_loop() diff --git a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py index 518336ed3..c0cd75489 100644 --- a/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py +++ b/python/artifact-searcher/artifact_searcher/cloud_auth_helper.py @@ -24,26 +24,18 @@ class CloudAuthHelper: - """Helper to connect to cloud registries using V2 authentication. + """V2 authentication helper for cloud registries. - This class knows how to authenticate with different types of artifact registries: - - AWS CodeArtifact (uses AWS access keys) - - GCP Artifact Registry (uses service account JSON) - - Artifactory (uses username/password) - - Nexus (uses username/password or anonymous access) - - It creates a MavenArtifactSearcher configured for each specific provider. + Supports: AWS (access keys), GCP (SA JSON), Artifactory/Nexus (user/pass or anonymous). + Creates configured MavenArtifactSearcher per provider. """ @staticmethod def resolve_auth_config(registry: Registry, artifact_type: str = "maven") -> Optional[AuthConfig]: - """Find the authentication settings for this registry. - - Each registry can have multiple authConfig entries (for different artifact types). - This looks up which authConfig to use based on what the maven_config references. + """Find auth settings for this registry. - Returns: - AuthConfig object with provider and credentials info, or None if not configured + Looks up authConfig based on maven_config reference. + Returns AuthConfig or None. """ if artifact_type != "maven": return None @@ -66,17 +58,10 @@ def resolve_auth_config(registry: Registry, artifact_type: str = "maven") -> Opt @staticmethod def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, dict]]) -> Optional[dict]: - """Get the actual username/password or secrets from the credentials vault. - - The authConfig tells us the credentials ID to look up. This function finds - those credentials in the environment's credential store and extracts them. + """Get credentials from vault using authConfig's credentials ID. - Special handling: - - Empty username/password = anonymous access (returns None) - - Different credential types: usernamePassword, secret (for GCP service accounts) - - Returns: - dict with username/password or secret, or None for anonymous access + Handles: usernamePassword (returns dict), secret (GCP), empty creds (anonymous). + Returns dict or None for anonymous. 
""" cred_id = auth_config.credentials_id if not cred_id: @@ -114,8 +99,7 @@ def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, d logger.info(f"Resolved credentials for '{cred_id}' (type: {cred_type})") - # Make sure we have the right credential format for each cloud provider - # AWS needs username (access key ID) and password (secret access key) + # Validate credential format per provider if auth_config.provider == "aws": if "username" not in creds or "password" not in creds: raise ValueError(f"AWS credentials must have 'username' and 'password'") @@ -128,14 +112,7 @@ def resolve_credentials(auth_config: AuthConfig, env_creds: Optional[Dict[str, d @staticmethod def _extract_repository_name(url: str) -> str: - """Extract the repository name from a registry URL. - - For CodeArtifact: https://domain-owner.d.codeartifact.region.amazonaws.com/maven/repo-name/ - Returns 'repo-name' - - For Artifact Registry: https://region-maven.pkg.dev/project/repo-name/ - Returns 'repo-name' - """ + """Extract repository name from registry URL (last path segment).""" parts = [p for p in url.rstrip('/').split('/') if p] if parts: repo_name = parts[-1] @@ -145,13 +122,7 @@ def _extract_repository_name(url: str) -> str: @staticmethod def _extract_region(url: str, auth_config: AuthConfig) -> str: - """Figure out which AWS region to use. - - Tries these sources in order: - 1. Explicit region in authConfig (if configured) - 2. Extract from URL pattern (e.g., 'us-west-2.amazonaws.com') - 3. Default to 'us-east-1' if can't determine - """ + """Get AWS region from authConfig, URL, or default to us-east-1.""" if auth_config.provider == "aws" and auth_config.aws_region: logger.debug(f"Using explicit AWS region: {auth_config.aws_region}") return auth_config.aws_region @@ -165,13 +136,7 @@ def _extract_region(url: str, auth_config: AuthConfig) -> str: @staticmethod def _extract_gcp_region(url: str) -> str: - """Extract GCP region from Artifact Registry URL. - - GCP Artifact Registry URL format: https://-maven.pkg.dev/... - Example: https://us-east1-maven.pkg.dev/... -> us-east1 - - Note: GCP regions use format like 'us-east1', NOT 'us-east-1' - """ + """Extract GCP region from URL (format: us-east1, not us-east-1).""" match = re.search(r'https://([a-z0-9-]+)-maven\.pkg\.dev', url) if match: region = match.group(1) @@ -182,14 +147,7 @@ def _extract_gcp_region(url: str) -> str: @staticmethod def _detect_provider(url: str, auth_config: AuthConfig) -> Optional[str]: - """Auto-detect provider from URL patterns for on-premise registries only. - - Auto-detection is ONLY for Nexus and Artifactory (on-premise registries). - AWS and GCP (cloud registries) require explicit provider specification. - - Returns: - Provider name or None if cannot be detected - """ + """Auto-detect provider from URL (Nexus/Artifactory only; AWS/GCP need explicit).""" # If provider is explicitly set, use it if auth_config.provider: logger.debug(f"Using explicit provider: {auth_config.provider}") @@ -216,15 +174,9 @@ def _detect_provider(url: str, auth_config: AuthConfig) -> Optional[str]: @staticmethod def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict]]) -> 'MavenArtifactSearcher': - """Create a searcher object that knows how to find artifacts in this registry. - - This is the main entry point for V2 artifact searching. It: - 1. Reads the registry configuration to determine the provider type - 2. Loads the appropriate credentials from the vault - 3. 
Creates and configures a MavenArtifactSearcher for that specific provider + """Create configured MavenArtifactSearcher for this registry. - Returns: - Configured MavenArtifactSearcher ready to search for and download artifacts + Resolves provider, loads credentials, configures searcher. """ if MavenArtifactSearcher is None: raise ImportError("qubership_pipelines_common_library not available") @@ -246,10 +198,7 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict if provider not in ["aws", "gcp", "artifactory", "nexus"]: raise ValueError(f"Unsupported provider: {provider}") - # Nexus URLs need special handling: - # Download URLs use: http://nexus/repository/repo-name/... - # Search API uses: http://nexus/service/rest/v1/search/... - # So we need to remove the /repository/ suffix before initializing the searcher + # Nexus: remove /repository/ suffix for search API compatibility if provider == "nexus" and registry_url.endswith("/repository/"): registry_url = registry_url[:-len("repository/")] logger.info(f"Nexus: adjusted registry URL to {registry_url} for search API") @@ -260,8 +209,7 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict # Create the base searcher object - provider-specific config comes next searcher = MavenArtifactSearcher(registry_url, params={"timeout": DEFAULT_SEARCHER_TIMEOUT}) - # Check if anonymous access is allowed for this provider type - # AWS and GCP APIs require authentication - they don't support anonymous access + # AWS/GCP require authentication (no anonymous access) if provider in ["aws", "gcp"] and creds is None: raise ValueError(f"{provider.upper()} requires credentials - anonymous access not supported") @@ -277,15 +225,7 @@ def create_maven_searcher(registry: Registry, env_creds: Optional[Dict[str, dict @staticmethod def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, creds: dict, registry_url: str) -> 'MavenArtifactSearcher': - """Set up the searcher to work with AWS CodeArtifact. - - AWS needs: - - Access key (stored as username) - - Secret key (stored as password) - - Domain name (from authConfig) - - Region (from authConfig or URL) - - Repository name (from URL) - """ + """Configure searcher for AWS CodeArtifact (access key, secret, domain, region, repo).""" if not auth_config.aws_domain: raise ValueError("AWS auth requires awsDomain in authConfig") region = CloudAuthHelper._extract_region(registry_url, auth_config) @@ -302,14 +242,7 @@ def _configure_aws(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, @staticmethod def _configure_gcp(searcher: 'MavenArtifactSearcher', auth_config: AuthConfig, creds: dict, registry_url: str) -> 'MavenArtifactSearcher': - """Set up the searcher to work with GCP Artifact Registry. - - GCP needs: - - Service account JSON (stored as secret) - - Project name (from authConfig or extracted from URL) - - Region (from URL, like 'us-central1') - - Repository name (from URL) - """ + """Configure searcher for GCP Artifact Registry (SA JSON, project, region, repo).""" if auth_config.auth_method != "service_account": raise ValueError(f"GCP auth_method '{auth_config.auth_method}' not supported") @@ -356,14 +289,9 @@ def _configure_artifactory(searcher: 'MavenArtifactSearcher', creds: Optional[di @staticmethod def _configure_nexus(searcher: 'MavenArtifactSearcher', creds: Optional[dict], registry: Registry) -> 'MavenArtifactSearcher': - """Set up the searcher to work with Nexus Repository Manager. 
- - Nexus is simple - just username and password, or anonymous if allowed. + """Configure searcher for Nexus (username/password or anonymous). - Important: The MavenArtifactSearcher library searches across ALL repositories - in Nexus (we can't limit to a specific repository). This is a library limitation, - not a bug in our code. Nexus will return results from any repository the user - has access to. + Note: Library searches all Nexus repos (cannot limit to specific repo). """ if creds is None: logger.info("Configuring Nexus with anonymous access (no credentials)") From 979d8d48eb33528bfd40a6e4ceae000a90790456 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Mon, 2 Feb 2026 00:18:51 +0530 Subject: [PATCH 41/48] Fix: Fixed Tests --- .../tests/sd/test_handle_sd_artifact.py | 16 ++++++++++---- .../tests/sd/test_handle_sd_local.py | 22 ++++++++++--------- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/scripts/build_env/tests/sd/test_handle_sd_artifact.py b/scripts/build_env/tests/sd/test_handle_sd_artifact.py index 089402fd5..a7161bff2 100644 --- a/scripts/build_env/tests/sd/test_handle_sd_artifact.py +++ b/scripts/build_env/tests/sd/test_handle_sd_artifact.py @@ -38,7 +38,7 @@ @pytest.mark.parametrize("test_case_name", TEST_CASES_POSITIVE) -@patch("handle_sd.download_sd_by_appver") +@patch("handle_sd._download_sd_by_appver_async") def test_sd_positive(mock_download_sd, test_case_name): env = Environment(str(Path(OUTPUT_DIR, test_case_name)), "cluster-01", "env-01") do_prerequisites(SD, TEST_SD_DIR, OUTPUT_DIR, test_case_name, env, test_suits_map) @@ -50,7 +50,11 @@ def test_sd_positive(mock_download_sd, test_case_name): file_path = Path(TEST_SD_DIR, test_case_name, f"mock_sd.json") sd_data = openJson(file_path) - mock_download_sd.return_value = sd_data + + # Mock async function to return coroutine + async def mock_return(*args, **kwargs): + return sd_data + mock_download_sd.side_effect = mock_return handle_sd(env, sd_source_type, sd_version, sd_data, sd_delta, sd_merge_mode) actual_dir = os.path.join(env.env_path, "Inventory", "solution-descriptor") @@ -60,7 +64,7 @@ def test_sd_positive(mock_download_sd, test_case_name): @pytest.mark.parametrize("test_case_name,expected_exception", [(k, v) for k, v in TEST_CASES_NEGATIVE.items()]) -@patch("handle_sd.download_sd_by_appver") +@patch("handle_sd._download_sd_by_appver_async") def test_sd_negative(mock_download_sd, test_case_name, expected_exception): env = Environment(str(Path(OUTPUT_DIR, test_case_name)), "cluster-01", "env-01") do_prerequisites(SD, TEST_SD_DIR, OUTPUT_DIR, test_case_name, env, test_suits_map) @@ -72,7 +76,11 @@ def test_sd_negative(mock_download_sd, test_case_name, expected_exception): file_path = Path(TEST_SD_DIR, test_case_name, f"mock_sd.json") sd_data = openJson(file_path) - mock_download_sd.return_value = sd_data + + # Mock async function to return coroutine + async def mock_return(*args, **kwargs): + return sd_data + mock_download_sd.side_effect = mock_return with pytest.raises(expected_exception): handle_sd(env, sd_source_type, sd_version, sd_data, sd_delta, sd_merge_mode) diff --git a/scripts/build_env/tests/sd/test_handle_sd_local.py b/scripts/build_env/tests/sd/test_handle_sd_local.py index daeaee4a7..b02021aae 100644 --- a/scripts/build_env/tests/sd/test_handle_sd_local.py +++ b/scripts/build_env/tests/sd/test_handle_sd_local.py @@ -66,19 +66,21 @@ def test_download_sd_uses_get_cred_config(mock_get_appdef, mock_check_artifact, 'aws-creds': {'type': 'usernamePassword', 'data': {'username': 'key', 
'password': 'secret'}}
     }
     mock_app_def = MagicMock()
+    mock_app_def.registry.credentials_id = None
     mock_get_appdef.return_value = mock_app_def
 
-    def capture_run(coro):
+    # Mock the async check_artifact_async to return an awaitable coroutine
+    async def mock_check_return():
         return ("http://sd-url", ("repo", "/tmp/sd.json"))
+    mock_check_artifact.return_value = mock_check_return()
 
-    with patch('handle_sd.asyncio.run', side_effect=capture_run):
-        with patch('handle_sd.open', create=True) as mock_open:
-            mock_open.return_value.__enter__.return_value.read.return_value = '{"applications": []}'
-
-            try:
-                env = Environment("/test/path", "test-cluster", "test-env")
-                download_sd_by_appver("test-app", "1.0.0", MagicMock(), env)
-            except:
-                pass
+    with patch('handle_sd.open', create=True) as mock_open:
+        mock_open.return_value.__enter__.return_value.read.return_value = '{"applications": []}'
+
+        try:
+            env = Environment("/test/path", "test-cluster", "test-env")
+            download_sd_by_appver("test-app", "1.0.0", MagicMock(), env)
+        except:
+            pass
 
     assert mock_get_creds.called

From 6477573caf90e0451cb605a92d369b18d408f366 Mon Sep 17 00:00:00 2001
From: BackendBits
Date: Thu, 5 Feb 2026 15:05:04 +0530
Subject: [PATCH 42/48] fix: fixed issue with SD after merge conflict

---
 scripts/build_env/process_sd.py               | 48 ++++++++++++++++---
 .../tests/sd/test_process_sd_artifact.py      | 12 ++---
 .../tests/sd/test_process_sd_local.py         |  4 +-
 3 files changed, 47 insertions(+), 17 deletions(-)

diff --git a/scripts/build_env/process_sd.py b/scripts/build_env/process_sd.py
index 6602663d6..53e343206 100644
--- a/scripts/build_env/process_sd.py
+++ b/scripts/build_env/process_sd.py
@@ -9,6 +9,7 @@
 import yaml
 from artifact_searcher import artifact
 from artifact_searcher.utils import models as artifact_models
+from envgenehelper import get_cred_config
 from envgenehelper.business_helper import getenv_and_log, getenv_with_error
 from envgenehelper.env_helper import Environment
 from envgenehelper.file_helper import identify_yaml_extension
@@ -284,7 +285,7 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
     app_def_getter_plugins = PluginEngine(plugins_dir='/module/scripts/handle_sd_plugins/app_def_getter')
 
     sd_data_list = []
-    for entry in sd_entries:  # appvers
+    for entry in sd_entries:
         if ":" not in entry:
             logger.error(f"Invalid SD_VERSION format: '{entry}'. Expected 'name:version'")
             exit(1)
@@ -292,7 +293,7 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
         source_name, version = entry.split(":", 1)
 
         logger.info(f"Starting download of SD: {source_name}-{version}")
-        sd_data = download_sd_by_appver(source_name, version, app_def_getter_plugins)
+        sd_data = download_sd_by_appver(source_name, version, app_def_getter_plugins, env)
 
         sd_data_list.append(sd_data)
 
@@ -300,18 +301,51 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod
     extract_sds_from_json(env, base_sd_path, sd_data_json, effective_merge_mode)
 
 
-def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine) -> dict[str, object]:
+def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, env: Environment = None) -> dict[str, object]:
+    """Download SD by app name and version with V2 support.
+ + Args: + app_name: Application name + version: Application version + plugins: Plugin engine for app def resolution + env: Environment object (optional, for V2 support) + + Returns: + SD data dictionary + """ if 'SNAPSHOT' in version: raise ValueError("SNAPSHOT is not supported version of Solution Descriptor artifacts") - # TODO: check if job would fail without plugins + app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins) - artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version)) + # Use get_cred_config() for V2 credential resolution + env_creds = get_cred_config() + artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds=env_creds)) if not artifact_info: raise ValueError( f'Solution descriptor content was not received for {app_name}:{version}') - sd_url, _ = artifact_info - return artifact.download_json_content(sd_url) + sd_url, mvn_repo = artifact_info + mvn_repo_value, mvn_repo_extra = mvn_repo + + # V2 optimization: read from local file if already downloaded + if mvn_repo_value == "v2_downloaded": + logger.debug(f"Reading V2 solution descriptor from local file: {mvn_repo_extra}") + with open(mvn_repo_extra, 'r') as f: + sd_data = json.load(f) + return sd_data + + # V1 fallback or non-V2 registry: download via HTTP + cred = None + if app_def.registry.credentials_id and env_creds: + cred_data = env_creds.get(app_def.registry.credentials_id) + if cred_data and cred_data.get('username'): + cred = artifact_models.Credentials( + username=cred_data.get('username', ''), + password=cred_data.get('password', '') + ) + logger.debug(f"Using credentials '{app_def.registry.credentials_id}' for SD download") + + return artifact.download_json_content(sd_url, cred) def get_appdef_for_app(appver: str, app_name: str, plugins: PluginEngine) -> artifact_models.Application: diff --git a/scripts/build_env/tests/sd/test_process_sd_artifact.py b/scripts/build_env/tests/sd/test_process_sd_artifact.py index 7190b7d46..7a4485a59 100644 --- a/scripts/build_env/tests/sd/test_process_sd_artifact.py +++ b/scripts/build_env/tests/sd/test_process_sd_artifact.py @@ -51,10 +51,8 @@ def test_sd_positive(mock_download_sd, test_case_name): file_path = Path(TEST_SD_DIR, test_case_name, f"mock_sd.json") sd_data = openJson(file_path) - # Mock async function to return coroutine - async def mock_return(*args, **kwargs): - return sd_data - mock_download_sd.side_effect = mock_return + # Mock synchronous function to return SD data directly + mock_download_sd.return_value = sd_data handle_sd(env, sd_source_type, sd_version, sd_data, sd_delta, sd_merge_mode) actual_dir = os.path.join(env.env_path, "Inventory", "solution-descriptor") @@ -77,10 +75,8 @@ def test_sd_negative(mock_download_sd, test_case_name, expected_exception): file_path = Path(TEST_SD_DIR, test_case_name, f"mock_sd.json") sd_data = openJson(file_path) - # Mock async function to return coroutine - async def mock_return(*args, **kwargs): - return sd_data - mock_download_sd.side_effect = mock_return + # Mock synchronous function to return SD data directly + mock_download_sd.return_value = sd_data with pytest.raises(expected_exception): handle_sd(env, sd_source_type, sd_version, sd_data, sd_delta, sd_merge_mode) diff --git a/scripts/build_env/tests/sd/test_process_sd_local.py b/scripts/build_env/tests/sd/test_process_sd_local.py index 4028541b9..433a2d9b9 100644 --- a/scripts/build_env/tests/sd/test_process_sd_local.py +++ 
b/scripts/build_env/tests/sd/test_process_sd_local.py @@ -57,8 +57,8 @@ def test_sd_positive(test_case_name): logger.info(f"=====SUCCESS - {test_case_name}======") -@patch('process_sd.get_cred_config') -@patch('process_sd.artifact.check_artifact_async') +@patch('envgenehelper.get_cred_config') +@patch('artifact_searcher.artifact.check_artifact_async') @patch('process_sd.get_appdef_for_app') def test_download_sd_uses_get_cred_config(mock_get_appdef, mock_check_artifact, mock_get_creds): """Test that download_sd_by_appver uses existing get_cred_config utility""" From 1b0d6fe295c5d4e8e90eaf78119ff79a944b86ed Mon Sep 17 00:00:00 2001 From: BackendBits Date: Thu, 5 Feb 2026 15:25:46 +0530 Subject: [PATCH 43/48] fix: fixed tests --- scripts/build_env/tests/sd/test_process_sd_local.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_env/tests/sd/test_process_sd_local.py b/scripts/build_env/tests/sd/test_process_sd_local.py index 433a2d9b9..1a670fb81 100644 --- a/scripts/build_env/tests/sd/test_process_sd_local.py +++ b/scripts/build_env/tests/sd/test_process_sd_local.py @@ -57,7 +57,7 @@ def test_sd_positive(test_case_name): logger.info(f"=====SUCCESS - {test_case_name}======") -@patch('envgenehelper.get_cred_config') +@patch('process_sd.get_cred_config') @patch('artifact_searcher.artifact.check_artifact_async') @patch('process_sd.get_appdef_for_app') def test_download_sd_uses_get_cred_config(mock_get_appdef, mock_check_artifact, mock_get_creds): From 5605a625f6c687266cc9c48274c0aee7ebd12211 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Thu, 5 Feb 2026 17:02:29 +0530 Subject: [PATCH 44/48] fix: updated the Process_sd --- scripts/build_env/process_sd.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/scripts/build_env/process_sd.py b/scripts/build_env/process_sd.py index 53e343206..f2729d050 100644 --- a/scripts/build_env/process_sd.py +++ b/scripts/build_env/process_sd.py @@ -291,10 +291,7 @@ def download_sds_with_version(env, base_sd_path, sd_version, effective_merge_mod exit(1) source_name, version = entry.split(":", 1) - logger.info(f"Starting download of SD: {source_name}-{version}") - sd_data = download_sd_by_appver(source_name, version, app_def_getter_plugins, env) - sd_data_list.append(sd_data) sd_data_json = json.dumps(sd_data_list) @@ -316,9 +313,10 @@ def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, en if 'SNAPSHOT' in version: raise ValueError("SNAPSHOT is not supported version of Solution Descriptor artifacts") + logger.info(f"Starting download of SD: {app_name}-{version}") app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins) - # Use get_cred_config() for V2 credential resolution + # Use existing get_cred_config() utility for credentials env_creds = get_cred_config() artifact_info = asyncio.run(artifact.check_artifact_async(app_def, artifact.FileExtension.JSON, version, env_creds=env_creds)) if not artifact_info: @@ -332,9 +330,10 @@ def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, en logger.debug(f"Reading V2 solution descriptor from local file: {mvn_repo_extra}") with open(mvn_repo_extra, 'r') as f: sd_data = json.load(f) + logger.info(f"Successfully downloaded SD: {app_name}-{version}") return sd_data - # V1 fallback or non-V2 registry: download via HTTP + # V1 fallback path or non-V2 registry - need credentials for HTTP download cred = None if app_def.registry.credentials_id and env_creds: cred_data = 
env_creds.get(app_def.registry.credentials_id) @@ -345,7 +344,9 @@ def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, en ) logger.debug(f"Using credentials '{app_def.registry.credentials_id}' for SD download") - return artifact.download_json_content(sd_url, cred) + sd_data = artifact.download_json_content(sd_url, cred) + logger.info(f"Successfully downloaded SD: {app_name}-{version}") + return sd_data def get_appdef_for_app(appver: str, app_name: str, plugins: PluginEngine) -> artifact_models.Application: From cc9530464ee3b75fa76cc8188b63dc4a3856e24f Mon Sep 17 00:00:00 2001 From: BackendBits Date: Thu, 5 Feb 2026 19:22:20 +0530 Subject: [PATCH 45/48] fix: Allow SNAPSHOT version resolution for SD artifacts --- scripts/build_env/process_sd.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/scripts/build_env/process_sd.py b/scripts/build_env/process_sd.py index 6015c3df8..756de7a84 100644 --- a/scripts/build_env/process_sd.py +++ b/scripts/build_env/process_sd.py @@ -303,9 +303,6 @@ def download_sd_by_appver(app_name: str, version: str, plugins: PluginEngine, en Returns: SD data dictionary """ - if 'SNAPSHOT' in version: - raise ValueError("SNAPSHOT is not supported version of Solution Descriptor artifacts") - logger.info(f"Starting download of SD: {app_name}-{version}") app_def = get_appdef_for_app(f"{app_name}:{version}", app_name, plugins) From e0efde15b3b94d896014e1512da9dc9bbc2b0874 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Sun, 8 Feb 2026 21:10:53 +0530 Subject: [PATCH 46/48] fix: fixed issue with render --- scripts/build_env/render_config_env.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/build_env/render_config_env.py b/scripts/build_env/render_config_env.py index 6e8c02e79..67c542bde 100644 --- a/scripts/build_env/render_config_env.py +++ b/scripts/build_env/render_config_env.py @@ -489,8 +489,11 @@ def validate_appregdefs(self): if not regdef_files: logger.warning(f"No RegDef YAMLs found in {regdef_dir}") for file in regdef_files: - logger.info(f"RegDef file: {file}") - validate_yaml_by_scheme_or_fail(file, str(SCHEMAS_DIR / "regdef.schema.json")) + logger.info(f"Validating RegDef file: {file}") + regdef_content = openYaml(file) + version = str(regdef_content.get('version', '1.0')) + schema_path = SCHEMAS_DIR / "regdef-v2.schema.json" if version != '1.0' else SCHEMAS_DIR / "regdef.schema.json" + validate_yaml_by_scheme_or_fail(file, str(schema_path)) def process_app_reg_defs(self, env_name: str, extra_env: dict): logger.info( From 363501e70cf59a2f7a33c802bec935d1f0d0e079 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Mon, 9 Feb 2026 22:54:20 +0530 Subject: [PATCH 47/48] fix: fixed schema --- schemas/regdef-v2.schema.json | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/schemas/regdef-v2.schema.json b/schemas/regdef-v2.schema.json index 5b1b3d8f7..7f1a30249 100644 --- a/schemas/regdef-v2.schema.json +++ b/schemas/regdef-v2.schema.json @@ -66,9 +66,7 @@ "enum": [ "aws", "azure", - "gcp", - "nexus", - "artifactory" + "gcp" ] }, "authMethod": { From 34a902b5a958fa6634f50cd02840733e2bd0bde5 Mon Sep 17 00:00:00 2001 From: BackendBits Date: Wed, 11 Feb 2026 00:09:39 +0530 Subject: [PATCH 48/48] fix: Review Comment for Duplicated method and Missing Test --- python/envgene/envgenehelper/__init__.py | 3 +- python/envgene/envgenehelper/constants.py | 5 +++ python/envgene/envgenehelper/validation.py | 38 +++++++++++++++++++ scripts/build_env/main.py | 30 --------------- 
scripts/build_env/render_config_env.py | 24 ++---------- .../tests/app_reg_defs/test_appregdefs.py | 1 + .../env-01/Inventory/env_definition.yml | 14 +++++++ .../expected/regdefs/registry-v2.yml | 25 ++++++++++++ .../templates/regdefs/registry-v2.yaml.j2 | 25 ++++++++++++ 9 files changed, 113 insertions(+), 52 deletions(-) create mode 100644 test_data/test_app_reg_defs/TC-001-013/environments/cluster-01/env-01/Inventory/env_definition.yml create mode 100644 test_data/test_app_reg_defs/TC-001-013/expected/regdefs/registry-v2.yml create mode 100644 test_data/test_app_reg_defs/TC-001-013/templates/regdefs/registry-v2.yaml.j2 diff --git a/python/envgene/envgenehelper/__init__.py b/python/envgene/envgenehelper/__init__.py index 8adea78a9..2bd368025 100644 --- a/python/envgene/envgenehelper/__init__.py +++ b/python/envgene/envgenehelper/__init__.py @@ -10,4 +10,5 @@ from .sd_merge_helper import * from .yaml_validator import checkByWhiteList, checkByBlackList, checkSchemaValidationFailed, getSchemaValidationErrorMessage from .crypt import decrypt_file, encrypt_file, decrypt_all_cred_files_for_env, encrypt_all_cred_files_for_env, is_encrypted -from .constants import cleanup_targets \ No newline at end of file +from .constants import cleanup_targets, APPDEF_SCHEMA_FILE, REGDEF_V1_SCHEMA_FILE, REGDEF_V2_SCHEMA_FILE +from .validation import validate_appregdefs \ No newline at end of file diff --git a/python/envgene/envgenehelper/constants.py b/python/envgene/envgenehelper/constants.py index 136ab51f5..d91e5a4e9 100644 --- a/python/envgene/envgenehelper/constants.py +++ b/python/envgene/envgenehelper/constants.py @@ -9,3 +9,8 @@ "bg_domain.yml", "composite_structure.yml", ] + +# Schema file names for AppDef/RegDef validation +APPDEF_SCHEMA_FILE = "appdef.schema.json" +REGDEF_V1_SCHEMA_FILE = "regdef.schema.json" +REGDEF_V2_SCHEMA_FILE = "regdef-v2.schema.json" diff --git a/python/envgene/envgenehelper/validation.py b/python/envgene/envgenehelper/validation.py index a5a6aeec9..cf4277d5b 100644 --- a/python/envgene/envgenehelper/validation.py +++ b/python/envgene/envgenehelper/validation.py @@ -1,3 +1,11 @@ +import os +from pathlib import Path + +from .constants import APPDEF_SCHEMA_FILE, REGDEF_V1_SCHEMA_FILE, REGDEF_V2_SCHEMA_FILE +from .logger import logger +from .yaml_helper import findAllYamlsInDir, openYaml, validate_yaml_by_scheme_or_fail + + def ensure_required_keys(context: dict, required: list[str]): missing = [var for var in required if var not in context] if missing: @@ -19,3 +27,33 @@ def ensure_valid_fields(context: dict, fields: list[str]): f"Invalid or empty fields found: {', '.join(invalid)}. " f"Required fields: {', '.join(fields)}" ) + + +def validate_appregdefs(appdef_dir, regdef_dir, schemas_dir): + """Validate AppDef and RegDef files against their respective schemas. + + Args: + appdef_dir: Path to the AppDefs directory. + regdef_dir: Path to the RegDefs directory. + schemas_dir: Path to the directory containing schema JSON files. 
+    """
+    schemas_path = Path(schemas_dir)
+
+    if os.path.exists(appdef_dir):
+        appdef_files = findAllYamlsInDir(appdef_dir)
+        if not appdef_files:
+            logger.warning(f"No AppDef YAMLs found in {appdef_dir}")
+        for file in appdef_files:
+            logger.info(f"AppDef file: {file}")
+            validate_yaml_by_scheme_or_fail(file, str(schemas_path / APPDEF_SCHEMA_FILE))
+
+    if os.path.exists(regdef_dir):
+        regdef_files = findAllYamlsInDir(regdef_dir)
+        if not regdef_files:
+            logger.warning(f"No RegDef YAMLs found in {regdef_dir}")
+        for file in regdef_files:
+            logger.info(f"Validating RegDef file: {file}")
+            regdef_content = openYaml(file)
+            version = str(regdef_content.get('version', '1.0'))
+            schema_file = REGDEF_V2_SCHEMA_FILE if version != '1.0' else REGDEF_V1_SCHEMA_FILE
+            validate_yaml_by_scheme_or_fail(file, str(schemas_path / schema_file))
diff --git a/scripts/build_env/main.py b/scripts/build_env/main.py
index c02a65496..8205d9441 100644
--- a/scripts/build_env/main.py
+++ b/scripts/build_env/main.py
@@ -1,6 +1,3 @@
-import argparse
-import os
-
 from envgenehelper import *
 from envgenehelper.deployer import *
 
@@ -18,9 +15,6 @@
 CLOUD_SCHEMA = "schemas/cloud.schema.json"
 NAMESPACE_SCHEMA = "schemas/namespace.schema.json"
 ENV_SPECIFIC_RESOURCE_PROFILE_SCHEMA = "schemas/resource-profile.schema.json"
-APPDEF_SCHEMA = "schemas/appdef.schema.json"
-REGDEF_V1_SCHEMA = "schemas/regdef.schema.json"
-REGDEF_V2_SCHEMA = "schemas/regdef-v2.schema.json"
 
 
 def prepare_folders_for_rendering(env_name, cluster_name, source_env_dir, templates_dir, render_dir,
@@ -269,30 +263,6 @@ def validate_parameter_files(param_files):
     return errors
 
 
-def validate_appregdefs(render_dir, env_name):
-    appdef_dir = f"{render_dir}/{env_name}/AppDefs"
-    regdef_dir = f"{render_dir}/{env_name}/RegDefs"
-
-    if os.path.exists(appdef_dir):
-        appdef_files = findAllYamlsInDir(appdef_dir)
-        if not appdef_files:
-            logger.info(f"No AppDef YAMLs found in {appdef_dir}")
-        for file in appdef_files:
-            logger.info(f"AppDef file: {file}")
-            validate_yaml_by_scheme_or_fail(file, APPDEF_SCHEMA)
-
-    if os.path.exists(regdef_dir):
-        regdef_files = findAllYamlsInDir(regdef_dir)
-        if not regdef_files:
-            logger.info(f"No RegDef YAMLs found in {regdef_dir}")
-        for file in regdef_files:
-            logger.info(f"Validating RegDef file: {file}")
-            regdef_content = openYaml(file)
-            version = str(regdef_content.get('version', '1.0'))
-            schema_path = REGDEF_V2_SCHEMA if version != '1.0' else REGDEF_V1_SCHEMA
-            validate_yaml_by_scheme_or_fail(file, schema_path)
-
-
 def render_environment(env_name, cluster_name, templates_dir, all_instances_dir, output_dir, work_dir):
     logger.info(f'env: {env_name}')
     logger.info(f'cluster_name: {cluster_name}')
diff --git a/scripts/build_env/render_config_env.py b/scripts/build_env/render_config_env.py
index 67c542bde..902e40ac5 100644
--- a/scripts/build_env/render_config_env.py
+++ b/scripts/build_env/render_config_env.py
@@ -6,7 +6,8 @@
 from deepmerge import always_merger
 from envgenehelper import *
 from envgenehelper.business_helper import get_bgd_object, get_namespaces
-from envgenehelper.validation import ensure_valid_fields, ensure_required_keys
+from envgenehelper.validation import (ensure_valid_fields, ensure_required_keys,
+                                      validate_appregdefs as _validate_appregdefs)
 from jinja2 import Template, TemplateError
 from pydantic import BaseModel, Field
 
@@ -472,28 +473,10 @@ def generate_profiles(self, profile_names: Iterable[str]):
 
     def validate_appregdefs(self):
         render_dir = self.ctx.current_env_dir
         appdef_dir = f"{render_dir}/AppDefs"
regdef_dir = f"{render_dir}/RegDefs" - - if os.path.exists(appdef_dir): - appdef_files = findAllYamlsInDir(appdef_dir) - if not appdef_files: - logger.warning(f"No AppDef YAMLs found in {appdef_dir}") - for file in appdef_files: - logger.info(f"AppDef file: {file}") - validate_yaml_by_scheme_or_fail(file, str(SCHEMAS_DIR / "appdef.schema.json")) - - if os.path.exists(regdef_dir): - regdef_files = findAllYamlsInDir(regdef_dir) - if not regdef_files: - logger.warning(f"No RegDef YAMLs found in {regdef_dir}") - for file in regdef_files: - logger.info(f"Validating RegDef file: {file}") - regdef_content = openYaml(file) - version = str(regdef_content.get('version', '1.0')) - schema_path = SCHEMAS_DIR / "regdef-v2.schema.json" if version != '1.0' else SCHEMAS_DIR / "regdef.schema.json" - validate_yaml_by_scheme_or_fail(file, str(schema_path)) + _validate_appregdefs(appdef_dir, regdef_dir, SCHEMAS_DIR) def process_app_reg_defs(self, env_name: str, extra_env: dict): logger.info( diff --git a/scripts/build_env/tests/app_reg_defs/test_appregdefs.py b/scripts/build_env/tests/app_reg_defs/test_appregdefs.py index 2d77b3cd9..724a79550 100644 --- a/scripts/build_env/tests/app_reg_defs/test_appregdefs.py +++ b/scripts/build_env/tests/app_reg_defs/test_appregdefs.py @@ -75,6 +75,7 @@ def _verify_rendered_files(self, test_number: str, render_dir: Path): "TC-001-005", "TC-001-006", "TC-001-008", + "TC-001-013", ] @pytest.mark.parametrize("test_number", POSITIVE_CASES) diff --git a/test_data/test_app_reg_defs/TC-001-013/environments/cluster-01/env-01/Inventory/env_definition.yml b/test_data/test_app_reg_defs/TC-001-013/environments/cluster-01/env-01/Inventory/env_definition.yml new file mode 100644 index 000000000..ca44da31c --- /dev/null +++ b/test_data/test_app_reg_defs/TC-001-013/environments/cluster-01/env-01/Inventory/env_definition.yml @@ -0,0 +1,14 @@ +inventory: + environmentName: env-01 + tenantName: tenant + cloudName: cluster_01_env_01 + cloudPassport: cluster-01 + description: V2 RegDef rendering test + owners: Qubership team +envTemplate: + name: composite-prod + artifact: deployment-configuration-env-templates:1.2.3 + sharedTemplateVariables: + - prod-template-variables +generatedVersions: + generateEnvironmentLatestVersion: "deployment-configuration-env-templates:composite-prod" diff --git a/test_data/test_app_reg_defs/TC-001-013/expected/regdefs/registry-v2.yml b/test_data/test_app_reg_defs/TC-001-013/expected/regdefs/registry-v2.yml new file mode 100644 index 000000000..53497307e --- /dev/null +++ b/test_data/test_app_reg_defs/TC-001-013/expected/regdefs/registry-v2.yml @@ -0,0 +1,25 @@ +version: "2.0" +name: "registry-v2" +authConfig: + main-auth: + credentialsId: "registry-cred" + authType: "longLived" + authMethod: "user_pass" +mavenConfig: + authConfig: "main-auth" + repositoryDomainName: "maven.qubership.org" + targetSnapshot: "snapshots" + targetStaging: "staging" + targetRelease: "releases" + releaseGroup: "releases" + snapshotGroup: "snapshots" +dockerConfig: + authConfig: "main-auth" + snapshotUri: "docker.qubership.org/snapshot" + stagingUri: "docker.qubership.org/staging" + releaseUri: "docker.qubership.org/release" + groupUri: "docker.qubership.org/group" + snapshotRepoName: "docker-snapshot" + stagingRepoName: "docker-staging" + releaseRepoName: "docker-release" + groupName: "docker-group" diff --git a/test_data/test_app_reg_defs/TC-001-013/templates/regdefs/registry-v2.yaml.j2 b/test_data/test_app_reg_defs/TC-001-013/templates/regdefs/registry-v2.yaml.j2 new file mode 100644 
index 000000000..1bbd06ced --- /dev/null +++ b/test_data/test_app_reg_defs/TC-001-013/templates/regdefs/registry-v2.yaml.j2 @@ -0,0 +1,25 @@ +version: "2.0" +name: "registry-v2" +authConfig: + main-auth: + credentialsId: "registry-cred" + authType: "longLived" + authMethod: "user_pass" +mavenConfig: + authConfig: "main-auth" + repositoryDomainName: "{{ regdefs.overrides.maven.RepositoryDomainName | default('maven.qubership.org') }}" + targetSnapshot: "snapshots" + targetStaging: "staging" + targetRelease: "releases" + releaseGroup: "releases" + snapshotGroup: "snapshots" +dockerConfig: + authConfig: "main-auth" + snapshotUri: "{{ regdefs.overrides.docker.snapshotUri | default('docker.qubership.org/snapshot') }}" + stagingUri: "{{ regdefs.overrides.docker.stagingUri | default('docker.qubership.org/staging') }}" + releaseUri: "{{ regdefs.overrides.docker.releaseUri | default('docker.qubership.org/release') }}" + groupUri: "{{ regdefs.overrides.docker.groupUri | default('docker.qubership.org/group') }}" + snapshotRepoName: "docker-snapshot" + stagingRepoName: "docker-staging" + releaseRepoName: "docker-release" + groupName: "docker-group"
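
A minimal usage sketch of the shared validator that PATCH 48 moves into
envgenehelper.validation (the render_dir/env_name values and the call site
below are illustrative assumptions, not part of the series):

    # Sketch only: the paths are hypothetical; validate_appregdefs and its
    # (appdef_dir, regdef_dir, schemas_dir) signature come from PATCH 48.
    from envgenehelper import validate_appregdefs

    render_dir = "/build/render"   # hypothetical render output directory
    env_name = "env-01"            # hypothetical environment name

    # RegDef files whose top-level 'version' is not '1.0' are checked against
    # regdef-v2.schema.json; all others fall back to regdef.schema.json.
    validate_appregdefs(
        appdef_dir=f"{render_dir}/{env_name}/AppDefs",
        regdef_dir=f"{render_dir}/{env_name}/RegDefs",
        schemas_dir="schemas",
    )

Keeping one implementation behind both the main.py and render_config_env.py
call sites means the version-detection rule only ever changes in one place.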