From 9d2f63bb8f745ac6653cd2df588658d0d2e1f498 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Thu, 13 Mar 2025 15:41:22 -0300 Subject: [PATCH 01/18] test Signed-off-by: Gustavo Lira --- .ibm/pipelines/jobs/periodic.sh | 8 ++++---- .ibm/pipelines/openshift-ci-tests.sh | 30 ++++++++++++++++------------ docker/install-dynamic-plugins.py | 15 ++++++++------ 3 files changed, 30 insertions(+), 23 deletions(-) diff --git a/.ibm/pipelines/jobs/periodic.sh b/.ibm/pipelines/jobs/periodic.sh index f8efbb4f46..1f0428995a 100644 --- a/.ibm/pipelines/jobs/periodic.sh +++ b/.ibm/pipelines/jobs/periodic.sh @@ -15,8 +15,8 @@ handle_nightly() { initiate_deployments deploy_test_backstage_provider "${NAME_SPACE}" - run_standard_deployment_tests - run_runtime_config_change_tests +# run_standard_deployment_tests +# run_runtime_config_change_tests run_sanity_plugins_check } @@ -37,6 +37,6 @@ run_runtime_config_change_tests() { run_sanity_plugins_check() { initiate_sanity_plugin_checks_deployment "${RELEASE_NAME}" "${NAME_SPACE_SANITY_PLUGINS_CHECK}" - local sanity_plugins_url="https://${RELEASE_NAME}-backstage-${NAME_SPACE_SANITY_PLUGINS_CHECK}.${K8S_CLUSTER_ROUTER_BASE}" - check_and_test "${RELEASE_NAME}" "${NAME_SPACE_SANITY_PLUGINS_CHECK}" "${sanity_plugins_url}" +# local sanity_plugins_url="https://${RELEASE_NAME}-backstage-${NAME_SPACE_SANITY_PLUGINS_CHECK}.${K8S_CLUSTER_ROUTER_BASE}" +# check_and_test "${RELEASE_NAME}" "${NAME_SPACE_SANITY_PLUGINS_CHECK}" "${sanity_plugins_url}" } diff --git a/.ibm/pipelines/openshift-ci-tests.sh b/.ibm/pipelines/openshift-ci-tests.sh index 0595e543fa..479cb69bb1 100755 --- a/.ibm/pipelines/openshift-ci-tests.sh +++ b/.ibm/pipelines/openshift-ci-tests.sh @@ -10,20 +10,24 @@ OVERALL_RESULT=0 # Define a cleanup function to be executed upon script exit. 
# shellcheck disable=SC2317 -cleanup() { - echo "Cleaning up before exiting" - if [[ "${OPENSHIFT_CI}" == "true" ]]; then - case "$JOB_NAME" in - *gke*) - echo "Calling cleanup_gke" - cleanup_gke - ;; - esac - fi - rm -rf ~/tmpbin -} +#cleanup() { +# echo "Cleaning up before exiting" +# if [[ "${OPENSHIFT_CI}" == "true" ]]; then +# case "$JOB_NAME" in +# *gke*) +# echo "Calling cleanup_gke" +# cleanup_gke +# ;; +# esac +# fi +# rm -rf ~/tmpbin +#} +# +#trap cleanup EXIT INT ERR -trap cleanup EXIT INT ERR +export JOB_NAME=nightly +export K8S_CLUSTER_TOKEN=$K8S_CLUSTER_TOKEN_TEMPORARY +export K8S_CLUSTER_URL='https://api.wnayy-383nx-m7s.ckkb.p3.openshiftapps.com:443' SCRIPTS=( "env_variables.sh" diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index b97e7764a1..c5b3888b06 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -167,7 +167,7 @@ def download(self, package: str) -> str: shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) self.extract_plugin(tar_file=tar_file, plugin_path=plugin_path) return plugin_path - + def digest(self, package: str) -> str: (image, plugin_path) = package.split('!') image_url = image.replace('oci://', 'docker://') @@ -233,7 +233,7 @@ def wait_for_lock_release(lock_file_path): print("======= Lock released.") def main(): - + start_time = datetime.now() dynamicPluginsRoot = sys.argv[1] lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') @@ -352,7 +352,7 @@ def main(): with open(hash_file_path, 'r') as hash_file: hash_value = hash_file.read().strip() plugin_path_by_hash[hash_value] = dir_name - + oci_downloader = OciDownloader(dynamicPluginsRoot) # iterate through the list of plugins @@ -369,7 +369,7 @@ def main(): # The OCI downloader try: pull_policy = plugin.get('pullPolicy', PullPolicy.ALWAYS if ':latest!' 
in package else PullPolicy.IF_NOT_PRESENT) - + if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) plugin_path_by_hash.pop(plugin['hash']) @@ -390,8 +390,8 @@ def main(): continue else: print('\n======= Installing dynamic plugin', package, flush=True) - - else: + + else: print('\n======= Installing dynamic plugin', package, flush=True) plugin_path = oci_downloader.download(package) @@ -520,4 +520,7 @@ def main(): print('\n======= Removing previously installed dynamic plugin', plugin_path_by_hash[hash_value], flush=True) shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + end_time = datetime.now() + elapsed_time = end_time - start_time + print(f"Total Execution Time: {elapsed_time}") main() From 91d35ca5b7907c6a606bb57df27eb0054eb48ec6 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Thu, 13 Mar 2025 15:43:54 -0300 Subject: [PATCH 02/18] test Signed-off-by: Gustavo Lira --- .ibm/pipelines/env_variables.sh | 1 + .ibm/pipelines/openshift-ci-tests.sh | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.ibm/pipelines/env_variables.sh b/.ibm/pipelines/env_variables.sh index 1cd68c7cf3..c0b6749438 100755 --- a/.ibm/pipelines/env_variables.sh +++ b/.ibm/pipelines/env_variables.sh @@ -51,6 +51,7 @@ GH_USER2_2FA_SECRET=$(cat /tmp/secrets/GH_USER2_2FA_SECRET) GH_RHDH_QE_USER_TOKEN=$(cat /tmp/secrets/GH_RHDH_QE_USER_TOKEN) K8S_CLUSTER_TOKEN_TEMPORARY=$(cat /tmp/secrets/K8S_CLUSTER_TOKEN_TEMPORARY) +CLUSTER_BOT_TOKEN=$(cat /tmp/secrets/CLUSTER_BOT_TOKEN) GITLAB_TOKEN=$(cat /tmp/secrets/GITLAB_TOKEN) diff --git a/.ibm/pipelines/openshift-ci-tests.sh b/.ibm/pipelines/openshift-ci-tests.sh index 479cb69bb1..29807a87d1 100755 --- a/.ibm/pipelines/openshift-ci-tests.sh +++ b/.ibm/pipelines/openshift-ci-tests.sh @@ -26,8 +26,8 @@ OVERALL_RESULT=0 #trap cleanup EXIT INT ERR export JOB_NAME=nightly -export 
K8S_CLUSTER_TOKEN=$K8S_CLUSTER_TOKEN_TEMPORARY -export K8S_CLUSTER_URL='https://api.wnayy-383nx-m7s.ckkb.p3.openshiftapps.com:443' +export K8S_CLUSTER_TOKEN=$CLUSTER_BOT_TOKEN +export K8S_CLUSTER_URL='https://api.alxdq5slv4a572c9df.eastus.aroapp.io:6443' SCRIPTS=( "env_variables.sh" From 618f63759d78d9bf3b498e3c2abc22b3293e0407 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Fri, 14 Mar 2025 15:20:05 -0300 Subject: [PATCH 03/18] Add datetime module import to plugin installer script --- docker/install-dynamic-plugins.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index c5b3888b06..566932f66a 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -29,6 +29,7 @@ import atexit import time import signal +from datetime import datetime # This script is used to install dynamic plugins in the Backstage application, # and is available in the container image to be called at container initialization, From 9033b5ae2f6c23ad2e805264a92b4693205852f7 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Fri, 14 Mar 2025 17:12:47 -0300 Subject: [PATCH 04/18] Refactor and optimize dynamic plugin installation script Reorganized code for better readability and maintainability, introduced threading for plugin installation, and improved handling of OCI/NPM package downloads. Streamlined configuration merging, added comprehensive integrity checks, and enhanced plugin locking mechanisms. --- docker/install-dynamic-plugins.py | 759 ++++++++++++++++-------------- 1 file changed, 402 insertions(+), 357 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 566932f66a..9e32063b94 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -1,18 +1,3 @@ -# -# Copyright (c) 2023 Red Hat, Inc. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - import copy from enum import StrEnum import hashlib @@ -30,39 +15,7 @@ import time import signal from datetime import datetime - -# This script is used to install dynamic plugins in the Backstage application, -# and is available in the container image to be called at container initialization, -# for example in an init container when using Kubernetes. -# -# It expects, as the only argument, the path to the root directory where -# the dynamic plugins will be installed. -# -# Additionally, the MAX_ENTRY_SIZE environment variable can be defined to set -# the maximum size of a file in the archive (default: 20MB). -# -# The SKIP_INTEGRITY_CHECK environment variable can be defined with ("true") to skip the integrity check of remote packages -# -# It expects the `dynamic-plugins.yaml` file to be present in the current directory and -# to contain the list of plugins to install along with their optional configuration. 
-# -# The `dynamic-plugins.yaml` file must contain: -# - a `plugins` list of objects with the following properties: -# - `package`: the NPM package to install (either a package name or a path to a local package) -# - `integrity`: a string containing the integrity hash of the package (optional if package is local, as integrity check is not checked for local packages) -# - `pluginConfig`: an optional plugin-specific configuration fragment -# - `disabled`: an optional boolean to disable the plugin (`false` by default) -# - an optional `includes` list of yaml files to include, each file containing a list of plugins. -# -# The plugins listed in the included files will be included in the main list of considered plugins -# and possibly overwritten by the plugins already listed in the main `plugins` list. -# -# For each enabled plugin mentioned in the main `plugins` list and the various included files, -# the script will: -# - call `npm pack` to get the package archive and extract it in the dynamic plugins root directory -# - if the package comes from a remote registry, verify the integrity of the package with the given integrity hash -# - merge the plugin-specific configuration fragment in a global configuration file named `app-config.dynamic-plugins.yaml` -# +from concurrent.futures import ThreadPoolExecutor, as_completed class PullPolicy(StrEnum): IF_NOT_PRESENT = 'IfNotPresent' @@ -79,28 +32,27 @@ class InstallException(Exception): 'sha256', ) -def merge(source, destination, prefix = ''): +def merge(source, destination, prefix=''): for key, value in source.items(): if isinstance(value, dict): - # get node or create one node = destination.setdefault(key, {}) - merge(value, node, key + '.') + merge(value, node, prefix + key + '.') else: - # if key exists in destination trigger an error if key in destination and destination[key] != value: - raise InstallException(f"Config key '{ prefix + key }' defined differently for 2 dynamic plugins") - + raise InstallException( + 
f"Config key '{prefix + key}' defined differently for 2 dynamic plugins" + ) destination[key] = value - return destination def maybeMergeConfig(config, globalConfig): if config is not None and isinstance(config, dict): print('\t==> Merging plugin-specific configuration', flush=True) return merge(config, globalConfig) - else: - return globalConfig + return globalConfig + +# ================== OCI DOWNLOADER ================== class OciDownloader: def __init__(self, destination: str): self._skopeo = shutil.which('skopeo') @@ -120,9 +72,10 @@ def skopeo(self, command): def get_plugin_tar(self, image: str) -> str: if image not in self.image_to_tarball: - # run skopeo copy to copy the tar ball to the local filesystem print(f'\t==> Copying image {image} to local filesystem', flush=True) - image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() + image_digest = hashlib.sha256( + image.encode('utf-8'), usedforsecurity=False + ).hexdigest() local_dir = os.path.join(self.tmp_dir, image_digest) # replace oci:// prefix with docker:// image_url = image.replace('oci://', 'docker://') @@ -139,28 +92,33 @@ def get_plugin_tar(self, image: str) -> str: def extract_plugin(self, tar_file: str, plugin_path: str) -> None: with tarfile.open(tar_file, 'r:gz') as tar: # NOSONAR - # extract only the files in specified directory filesToExtract = [] + max_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) for member in tar.getmembers(): if not member.name.startswith(plugin_path): continue # zip bomb protection - if member.size > int(os.environ.get('MAX_ENTRY_SIZE', 20000000)): + if member.size > max_size: raise InstallException('Zip bomb detected in ' + member.name) if member.islnk() or member.issym(): realpath = os.path.realpath(os.path.join(plugin_path, *os.path.split(member.linkname))) if not realpath.startswith(plugin_path): - print(f'\t==> WARNING: skipping file containing link outside of the archive: ' + member.name + ' -> ' + member.linkpath) + 
print( + '\t==> WARNING: skipping file containing link outside of the archive: ' + f'{member.name} -> {member.linkpath}' + ) continue filesToExtract.append(member) tar.extractall(os.path.abspath(self.destination), members=filesToExtract, filter='tar') - def download(self, package: str) -> str: - # split by ! to get the path in the image - (image, plugin_path) = package.split('!') + """ + Baixa a imagem e extrai apenas o diretório plugin_path. + Retorna o plugin_path. + """ + image, plugin_path = package.split('!') tar_file = self.get_plugin_tar(image) plugin_directory = os.path.join(self.destination, plugin_path) if os.path.exists(plugin_directory): @@ -170,14 +128,16 @@ def download(self, package: str) -> str: return plugin_path def digest(self, package: str) -> str: - (image, plugin_path) = package.split('!') + image, _ = package.split('!') image_url = image.replace('oci://', 'docker://') output = self.skopeo(['inspect', image_url]) data = json.loads(output) - # OCI artifact digest field is defined as "hash method" ":" "hash" + # OCI artifact digest field is "hashmethod:hash" digest = data['Digest'].split(':')[1] return f"{digest}" + +# ================== INTEGRITY CHECK ================== def verify_package_integrity(plugin: dict, archive: str, working_directory: str) -> None: package = plugin['package'] if 'integrity' not in plugin: @@ -187,341 +147,426 @@ def verify_package_integrity(plugin: dict, archive: str, working_directory: str) if not isinstance(integrity, str): raise InstallException(f'Package integrity for {package} must be a string') - integrity = integrity.split('-') - if len(integrity) != 2: - raise InstallException(f'Package integrity for {package} must be a string of the form -') + algorithm_hash = integrity.split('-') + if len(algorithm_hash) != 2: + raise InstallException( + f'Package integrity for {package} must be -' + ) - algorithm = integrity[0] + algorithm, hash_digest = algorithm_hash if algorithm not in RECOGNIZED_ALGORITHMS: - raise 
InstallException(f'{package}: Provided Package integrity algorithm {algorithm} is not supported, please use one of following algorithms {RECOGNIZED_ALGORITHMS} instead') + raise InstallException( + f'{package}: Provided Package integrity algorithm {algorithm} not supported. ' + f'Use one of: {RECOGNIZED_ALGORITHMS}' + ) - hash_digest = integrity[1] try: - base64.b64decode(hash_digest, validate=True) + base64.b64decode(hash_digest, validate=True) except binascii.Error: - raise InstallException(f'{package}: Provided Package integrity hash {hash_digest} is not a valid base64 encoding') + raise InstallException( + f'{package}: Provided Package integrity hash {hash_digest} is not valid base64' + ) cat_process = subprocess.Popen(["cat", archive], stdout=subprocess.PIPE) - openssl_dgst_process = subprocess.Popen(["openssl", "dgst", "-" + algorithm, "-binary"], stdin=cat_process.stdout, stdout=subprocess.PIPE) - openssl_base64_process = subprocess.Popen(["openssl", "base64", "-A"], stdin=openssl_dgst_process.stdout, stdout=subprocess.PIPE) + openssl_dgst_process = subprocess.Popen( + ["openssl", "dgst", "-" + algorithm, "-binary"], + stdin=cat_process.stdout, + stdout=subprocess.PIPE + ) + openssl_base64_process = subprocess.Popen( + ["openssl", "base64", "-A"], + stdin=openssl_dgst_process.stdout, + stdout=subprocess.PIPE + ) output, _ = openssl_base64_process.communicate() - if hash_digest != output.decode('utf-8').strip(): - raise InstallException(f'{package}: The hash of the downloaded package {output.decode("utf-8").strip()} does not match the provided integrity hash {hash_digest} provided in the configuration file') + result_hash = output.decode('utf-8').strip() + if hash_digest != result_hash: + raise InstallException( + f'{package}: Hash mismatch: {result_hash} != {hash_digest}' + ) -# Create the lock file, so that other instances of the script will wait for this one to finish +# ================== LOCKING ================== def create_lock(lock_file_path): while 
True: - try: - with open(lock_file_path, 'x'): - print(f"======= Created lock file: {lock_file_path}") - return - except FileExistsError: - wait_for_lock_release(lock_file_path) - -# Remove the lock file + try: + with open(lock_file_path, 'x'): + print(f"======= Created lock file: {lock_file_path}") + return + except FileExistsError: + wait_for_lock_release(lock_file_path) + def remove_lock(lock_file_path): - os.remove(lock_file_path) - print(f"======= Removed lock file: {lock_file_path}") + if os.path.exists(lock_file_path): + os.remove(lock_file_path) + print(f"======= Removed lock file: {lock_file_path}") -# Wait for the lock file to be released def wait_for_lock_release(lock_file_path): - print(f"======= Waiting for lock release (file: {lock_file_path})...", flush=True) - while True: - if not os.path.exists(lock_file_path): - break - time.sleep(1) - print("======= Lock released.") - -def main(): - start_time = datetime.now() - dynamicPluginsRoot = sys.argv[1] - - lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') - atexit.register(remove_lock, lock_file_path) - signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0)) - create_lock(lock_file_path) - - maxEntrySize = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) - skipIntegrityCheck = os.environ.get("SKIP_INTEGRITY_CHECK", "").lower() == "true" - - dynamicPluginsFile = 'dynamic-plugins.yaml' - dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') - - # test if file dynamic-plugins.yaml exists - if not os.path.isfile(dynamicPluginsFile): - print(f"No {dynamicPluginsFile} file found. 
Skipping dynamic plugins installation.") - with open(dynamicPluginsGlobalConfigFile, 'w') as file: - file.write('') - file.close() - exit(0) - - globalConfig = { - 'dynamicPlugins': { - 'rootDirectory': 'dynamic-plugins-root' - } - } - - with open(dynamicPluginsFile, 'r') as file: - content = yaml.safe_load(file) - - if content == '' or content is None: - print(f"{dynamicPluginsFile} file is empty. Skipping dynamic plugins installation.") - with open(dynamicPluginsGlobalConfigFile, 'w') as file: - file.write('') - file.close() - exit(0) - - if not isinstance(content, dict): - raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") - - allPlugins = {} - - if skipIntegrityCheck: - print(f"SKIP_INTEGRITY_CHECK has been set to {skipIntegrityCheck}, skipping integrity check of packages") - - if 'includes' in content: - includes = content['includes'] - else: - includes = [] - - if not isinstance(includes, list): - raise InstallException(f"content of the \'includes\' field must be a list in {dynamicPluginsFile}") - - for include in includes: - if not isinstance(include, str): - raise InstallException(f"content of the \'includes\' field must be a list of strings in {dynamicPluginsFile}") - - print('\n======= Including dynamic plugins from', include, flush=True) - - if not os.path.isfile(include): - raise InstallException(f"File {include} does not exist") - - with open(include, 'r') as file: - includeContent = yaml.safe_load(file) - - if not isinstance(includeContent, dict): - raise InstallException(f"{include} content must be a YAML object") - - includePlugins = includeContent['plugins'] - if not isinstance(includePlugins, list): - raise InstallException(f"content of the \'plugins\' field must be a list in {include}") - - for plugin in includePlugins: - allPlugins[plugin['package']] = plugin - - if 'plugins' in content: - plugins = content['plugins'] - else: - plugins = [] - - if not isinstance(plugins, list): - raise InstallException(f"content of the 
\'plugins\' field must be a list in {dynamicPluginsFile}") - - for plugin in plugins: - package = plugin['package'] - if not isinstance(package, str): - raise InstallException(f"content of the \'plugins.package\' field must be a string in {dynamicPluginsFile}") - - # if `package` already exists in `allPlugins`, then override its fields - if package not in allPlugins: - allPlugins[package] = plugin - continue - - # override the included plugins with fields in the main plugins list - print('\n======= Overriding dynamic plugin configuration', package, flush=True) - for key in plugin: - if key == 'package': - continue - allPlugins[package][key] = plugin[key] - - # add a hash for each plugin configuration to detect changes - for plugin in allPlugins.values(): - hash_dict = copy.deepcopy(plugin) - # remove elements that shouldn't be tracked for installation detection - hash_dict.pop('pluginConfig', None) - hash = hashlib.sha256(json.dumps(hash_dict, sort_keys=True).encode('utf-8')).hexdigest() - plugin['hash'] = hash - - # create a dict of all currently installed plugins in dynamicPluginsRoot - plugin_path_by_hash = {} - for dir_name in os.listdir(dynamicPluginsRoot): - dir_path = os.path.join(dynamicPluginsRoot, dir_name) - if os.path.isdir(dir_path): - hash_file_path = os.path.join(dir_path, 'dynamic-plugin-config.hash') - if os.path.isfile(hash_file_path): - with open(hash_file_path, 'r') as hash_file: - hash_value = hash_file.read().strip() - plugin_path_by_hash[hash_value] = dir_name - - oci_downloader = OciDownloader(dynamicPluginsRoot) - - # iterate through the list of plugins - for plugin in allPlugins.values(): - package = plugin['package'] - - if 'disabled' in plugin and plugin['disabled'] is True: - print('\n======= Skipping disabled dynamic plugin', package, flush=True) - continue - - # Stores the relative path of the plugin directory once downloaded - plugin_path = '' - if package.startswith('oci://'): - # The OCI downloader - try: - pull_policy = 
plugin.get('pullPolicy', PullPolicy.ALWAYS if ':latest!' in package else PullPolicy.IF_NOT_PRESENT) - - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: - print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) - plugin_path_by_hash.pop(plugin['hash']) - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - continue - - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: - digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path_by_hash.pop(plugin['hash']), 'dynamic-plugin-image.hash') - local_image_digest = None - if os.path.isfile(digest_file_path): - with open(digest_file_path, 'r') as digest_file: - digest_value = digest_file.read().strip() - local_image_digest = digest_value - remote_image_digest = oci_downloader.digest(package) - if remote_image_digest == local_image_digest: - print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - continue - else: - print('\n======= Installing dynamic plugin', package, flush=True) - - else: - print('\n======= Installing dynamic plugin', package, flush=True) - - plugin_path = oci_downloader.download(package) - digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') - with open(digest_file_path, 'w') as digest_file: - digest_file.write(oci_downloader.digest(package)) - # remove any duplicate hashes which can occur when only the version is updated - for key in [k for k, v in plugin_path_by_hash.items() if v == plugin_path]: - plugin_path_by_hash.pop(key) - except Exception as e: - raise InstallException(f"Error while adding OCI plugin {package} to downloader: {e}") + print(f"======= Waiting for lock release (file: {lock_file_path})...", flush=True) + while True: + if not os.path.exists(lock_file_path): + break + time.sleep(1) + print("======= Lock 
released.") + + +# ================== PLUGIN INSTALL LOGIC (ASYNC WRAPPER) ================== +def install_plugin(plugin, dynamicPluginsRoot, skipIntegrityCheck, maxEntrySize, plugin_path_by_hash, oci_downloader): + """ + Função chamada em paralelo para instalar *um* plugin: + - Baixa OCI ou NPM + - Faz verificação de integridade (se aplicável) + - Retorna o 'plugin_path' instalado e o pluginConfig. + """ + package = plugin['package'] + plugin_hash = plugin['hash'] + # Para sabermos se instalamos ou não esse plugin (caso skip for IF_NOT_PRESENT) + installed_plugin_path = None + + # 1) Verifica se é OCI ou NPM + if package.startswith('oci://'): + # Determina pull policy (se for :latest!, default = ALWAYS) + if ':latest!' in package: + default_policy = PullPolicy.ALWAYS else: - # The NPM downloader - plugin_already_installed = False - pull_policy = plugin.get('pullPolicy', PullPolicy.IF_NOT_PRESENT) - - if plugin['hash'] in plugin_path_by_hash: - force_download = plugin.get('forceDownload', False) - if pull_policy == PullPolicy.ALWAYS or force_download: - print('\n======= Forcing download of already installed dynamic plugin', package, flush=True) - else: - print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) - plugin_already_installed = True - # remove the hash from plugin_path_by_hash so that we can detect plugins that have been removed - plugin_path_by_hash.pop(plugin['hash']) + default_policy = PullPolicy.IF_NOT_PRESENT + + pull_policy = plugin.get('pullPolicy', default_policy) + if isinstance(pull_policy, str): + pull_policy = PullPolicy(pull_policy) + + if plugin_hash in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: + # Pula download, já está instalado + print(f'\n======= Skipping download of already installed OCI plugin {package}', flush=True) + old_path = plugin_path_by_hash.pop(plugin_hash) + return old_path, plugin.get('pluginConfig') + + # Se pull_policy for ALWAYS, checa digest + if plugin_hash 
in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: + old_path = plugin_path_by_hash.pop(plugin_hash) + digest_file_path = os.path.join( + dynamicPluginsRoot, + old_path, + 'dynamic-plugin-image.hash' + ) + local_image_digest = None + if os.path.isfile(digest_file_path): + with open(digest_file_path, 'r') as f: + local_image_digest = f.read().strip() + + remote_digest = oci_downloader.digest(package) + if remote_digest == local_image_digest: + print(f'\n======= Skipping download of already installed OCI plugin {package}', flush=True) + return old_path, plugin.get('pluginConfig') else: - print('\n======= Installing dynamic plugin', package, flush=True) - - if plugin_already_installed: - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - continue - - package_is_local = package.startswith('./') - - # If package is not local, then integrity check is mandatory - if not package_is_local and not skipIntegrityCheck and not 'integrity' in plugin: - raise InstallException(f"No integrity hash provided for Package {package}") - - if package_is_local: - package = os.path.join(os.getcwd(), package[2:]) - - print('\t==> Grabbing package archive through `npm pack`', flush=True) - completed = subprocess.run(['npm', 'pack', package], capture_output=True, cwd=dynamicPluginsRoot) - if completed.returncode != 0: - raise InstallException(f'Error while installing plugin { package } with \'npm pack\' : ' + completed.stderr.decode('utf-8')) - - archive = os.path.join(dynamicPluginsRoot, completed.stdout.decode('utf-8').strip()) - - if not (package_is_local or skipIntegrityCheck): - print('\t==> Verifying package integrity', flush=True) - verify_package_integrity(plugin, archive, dynamicPluginsRoot) + print(f'\n======= Installing dynamic plugin (OCI, updated digest) {package}', flush=True) + else: + print(f'\n======= Installing dynamic OCI plugin {package}', flush=True) - directory = archive.replace('.tgz', '') - directoryRealpath = 
os.path.realpath(directory) - plugin_path = os.path.basename(directoryRealpath) + # De fato faz download + installed_plugin_path = oci_downloader.download(package) - if os.path.exists(directory): - print('\t==> Removing previous plugin directory', directory, flush=True) - shutil.rmtree(directory, ignore_errors=True, onerror=None) - os.mkdir(directory) + # Salva o digest em um arquivo + digest_file_path = os.path.join( + dynamicPluginsRoot, + installed_plugin_path, + 'dynamic-plugin-image.hash' + ) + with open(digest_file_path, 'w') as f: + f.write(oci_downloader.digest(package)) - print('\t==> Extracting package archive', archive, flush=True) - file = tarfile.open(archive, 'r:gz') # NOSONAR - # extract the archive content but take care of zip bombs + else: + # NPM plugin + pull_policy = plugin.get('pullPolicy', PullPolicy.IF_NOT_PRESENT) + if isinstance(pull_policy, str): + pull_policy = PullPolicy(pull_policy) + + # Se já tem hash e pullpolicy=IF_NOT_PRESENT => skip + if plugin_hash in plugin_path_by_hash: + old_path = plugin_path_by_hash.pop(plugin_hash) + force_download = plugin.get('forceDownload', False) + if pull_policy == PullPolicy.ALWAYS or force_download: + print(f'\n======= Forcing download of already installed NPM plugin {package}', flush=True) + else: + print(f'\n======= Skipping download of already installed NPM plugin {package}', flush=True) + return old_path, plugin.get('pluginConfig') + else: + print(f'\n======= Installing dynamic NPM plugin {package}', flush=True) + + package_is_local = package.startswith('./') + if (not package_is_local + and not skipIntegrityCheck + and 'integrity' not in plugin): + raise InstallException(f'No integrity hash provided for Package {package}') + + # Ajusta se local + if package_is_local: + package = os.path.join(os.getcwd(), package[2:]) + + print('\t==> Grabbing package archive through `npm pack`', flush=True) + completed = subprocess.run( + ['npm', 'pack', package], + capture_output=True, + cwd=dynamicPluginsRoot 
+ ) + if completed.returncode != 0: + raise InstallException( + f'Error while installing plugin {package} with npm pack: ' + + completed.stderr.decode('utf-8') + ) + + archive = os.path.join( + dynamicPluginsRoot, completed.stdout.decode('utf-8').strip() + ) + + if not package_is_local and not skipIntegrityCheck: + print('\t==> Verifying package integrity', flush=True) + verify_package_integrity(plugin, archive, dynamicPluginsRoot) + + # Normalmente o nome do dir = , sem .tgz + directory = archive.replace('.tgz', '') + directoryRealpath = os.path.realpath(directory) + installed_plugin_path = os.path.basename(directoryRealpath) + + if os.path.exists(directory): + print('\t==> Removing previous plugin directory', directory, flush=True) + shutil.rmtree(directory, ignore_errors=True, onerror=None) + os.mkdir(directory) + + print('\t==> Extracting package archive', archive, flush=True) + with tarfile.open(archive, 'r:gz') as file: for member in file.getmembers(): if member.isreg(): if not member.name.startswith('package/'): - raise InstallException("NPM package archive archive does not start with 'package/' as it should: " + member.name) + raise InstallException( + f'NPM package archive does not start with \"package/\": {member.name}' + ) if member.size > maxEntrySize: raise InstallException('Zip bomb detected in ' + member.name) + # remove prefixo member.name = member.name.removeprefix('package/') file.extract(member, path=directory, filter='tar') elif member.isdir(): print('\t\tSkipping directory entry', member.name, flush=True) elif member.islnk() or member.issym(): if not member.linkpath.startswith('package/'): - raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) - + raise InstallException( + f'NPM package link outside of archive: {member.name} -> {member.linkpath}' + ) member.name = member.name.removeprefix('package/') member.linkpath = member.linkpath.removeprefix('package/') - - realpath 
= os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) + realpath = os.path.realpath( + os.path.join(directory, *os.path.split(member.linkname)) + ) if not realpath.startswith(directoryRealpath): - raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) - + raise InstallException( + f'NPM package link outside of archive: {member.name} -> {member.linkpath}' + ) file.extract(member, path=directory, filter='tar') else: - if member.type == tarfile.CHRTYPE: - type_str = "character device" - elif member.type == tarfile.BLKTYPE: - type_str = "block device" - elif member.type == tarfile.FIFOTYPE: - type_str = "FIFO" - else: - type_str = "unknown" + # se for CHRTYPE, BLKTYPE, etc + raise InstallException( + f'NPM package archive contains special file: {member.name}' + ) + + print('\t==> Removing package archive', archive, flush=True) + os.remove(archive) - raise InstallException('NPM package archive contains a non regular file: ' + member.name + ' - ' + type_str) + # Independente de ser OCI ou NPM, grava dynamic-plugin-config.hash + hash_file_path = os.path.join(dynamicPluginsRoot, installed_plugin_path, 'dynamic-plugin-config.hash') + with open(hash_file_path, 'w') as digest_file: + digest_file.write(plugin_hash) - file.close() + print('\t==> Successfully installed dynamic plugin', package, flush=True) + return installed_plugin_path, plugin.get('pluginConfig') - print('\t==> Removing package archive', archive, flush=True) - os.remove(archive) - # create a hash file in the plugin directory - hash = plugin['hash'] - hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') - with open(hash_file_path, 'w') as digest_file: - digest_file.write(hash) +# ================== MAIN ================== +def main(): + start_time = datetime.now() - if 'pluginConfig' not in plugin: - print('\t==> Successfully installed dynamic plugin', package, flush=True) - 
continue + if len(sys.argv) < 2: + raise InstallException( + 'Usage: python script.py ' + ) - # if some plugin configuration is defined, merge it with the global configuration - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + dynamicPluginsRoot = sys.argv[1] - print('\t==> Successfully installed dynamic plugin', package, flush=True) + # Lock + lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') + atexit.register(remove_lock, lock_file_path) + signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0)) + create_lock(lock_file_path) - yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) + try: + maxEntrySize = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) + skipIntegrityCheck = os.environ.get("SKIP_INTEGRITY_CHECK", "").lower() == "true" + + dynamicPluginsFile = 'dynamic-plugins.yaml' + dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') + + if not os.path.isfile(dynamicPluginsFile): + print(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") + with open(dynamicPluginsGlobalConfigFile, 'w') as file: + file.write('') + return + + with open(dynamicPluginsFile, 'r') as f: + content = yaml.safe_load(f) + + if not content: + print(f"{dynamicPluginsFile} file is empty. 
Skipping dynamic plugins installation.") + with open(dynamicPluginsGlobalConfigFile, 'w') as file: + file.write('') + return + + if not isinstance(content, dict): + raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") + + if skipIntegrityCheck: + print(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping package integrity checks") + + globalConfig = { + 'dynamicPlugins': { + 'rootDirectory': 'dynamic-plugins-root' + } + } + + allPlugins = {} + includes = content.get('includes', []) + if not isinstance(includes, list): + raise InstallException(f"content of 'includes' must be a list in {dynamicPluginsFile}") + + # Carrega plugins de includes + for include in includes: + if not isinstance(include, str): + raise InstallException(f"'includes' must contain string file paths, got {include}") + print('\n======= Including dynamic plugins from', include, flush=True) + + if not os.path.isfile(include): + raise InstallException(f"File {include} does not exist") + + with open(include, 'r') as incFile: + includeContent = yaml.safe_load(incFile) + + if not isinstance(includeContent, dict): + raise InstallException(f"{include} content must be a YAML object") + + includePlugins = includeContent.get('plugins', []) + if not isinstance(includePlugins, list): + raise InstallException(f"'plugins' must be a list in {include}") + + for plugin in includePlugins: + allPlugins[plugin['package']] = plugin + + # Carrega plugins do dynamic-plugins.yaml principal + plugins = content.get('plugins', []) + if not isinstance(plugins, list): + raise InstallException(f"'plugins' must be a list in {dynamicPluginsFile}") + + for plugin in plugins: + package = plugin['package'] + if package in allPlugins: + print('\n======= Overriding dynamic plugin configuration', package, flush=True) + for key, val in plugin.items(): + if key != 'package': + allPlugins[package][key] = val + else: + allPlugins[package] = plugin + + # Gera hash de cada plugin + for plugin in allPlugins.values(): 
+ hash_dict = copy.deepcopy(plugin) + hash_dict.pop('pluginConfig', None) + plugin_hash = hashlib.sha256( + json.dumps(hash_dict, sort_keys=True).encode('utf-8') + ).hexdigest() + plugin['hash'] = plugin_hash + + # Lê plugins já instalados + plugin_path_by_hash = {} + for dir_name in os.listdir(dynamicPluginsRoot): + dir_path = os.path.join(dynamicPluginsRoot, dir_name) + if os.path.isdir(dir_path): + hash_file_path = os.path.join(dir_path, 'dynamic-plugin-config.hash') + if os.path.isfile(hash_file_path): + with open(hash_file_path, 'r') as hf: + hash_val = hf.read().strip() + plugin_path_by_hash[hash_val] = dir_name + + # Preparação do downloader (OCI) + oci_downloader = OciDownloader(dynamicPluginsRoot) + + # Filtra plugins habilitados + active_plugins = [] + for plugin in allPlugins.values(): + if plugin.get('disabled') is True: + print('\n======= Skipping disabled dynamic plugin', plugin['package'], flush=True) + else: + active_plugins.append(plugin) + + # ================================ + # FASE DE INSTALAÇÃO EM PARALELO + # ================================ + results = [] # lista de (pluginPathInstalado, pluginConfig) + exceptions = [] + + # Usamos 4 threads ou quantas forem (N) dependendo do seu cenário + # Se tiver muitas imagens, aumentar. Se tiver poucas, 4 ou 5 é suficiente. 
+ with ThreadPoolExecutor(max_workers=4) as executor: + future_to_plugin = {} + for plugin in active_plugins: + future = executor.submit( + install_plugin, + plugin, + dynamicPluginsRoot, + skipIntegrityCheck, + maxEntrySize, + plugin_path_by_hash, + oci_downloader + ) + future_to_plugin[future] = plugin['package'] + + for future in as_completed(future_to_plugin): + pkg = future_to_plugin[future] + try: + installed_path, plugin_cfg = future.result() + # Guardamos o que precisamos para merges após as threads + results.append((installed_path, plugin_cfg)) + except Exception as exc: + exceptions.append((pkg, exc)) + + # Se houve exceção em alguma thread, exibimos e abortamos + if exceptions: + for pkg, exc in exceptions: + print(f'\n**** ERROR while installing plugin {pkg}: {exc}', flush=True) + # Caso deseje encerrar com código de erro, ou re-raise: + raise InstallException('One or more plugins failed to install.') + + # ================================ + # MERGE DE CONFIGs + # ================================ + for installed_path, plugin_cfg in results: + if plugin_cfg: + globalConfig = maybeMergeConfig(plugin_cfg, globalConfig) + + # Salva config global + yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) + + # Remove plugins que restaram em plugin_path_by_hash (significa que eles + # estavam instalados antes, mas não foram citados agora) + for outdated_hash, old_plugin_dir in plugin_path_by_hash.items(): + plugin_directory = os.path.join(dynamicPluginsRoot, old_plugin_dir) + print('\n======= Removing previously installed dynamic plugin', old_plugin_dir, flush=True) + shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) - # remove plugins that have been removed from the configuration - for hash_value in plugin_path_by_hash: - plugin_directory = os.path.join(dynamicPluginsRoot, plugin_path_by_hash[hash_value]) - print('\n======= Removing previously installed dynamic plugin', plugin_path_by_hash[hash_value], flush=True) - 
shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + finally: + # Mesmo se der erro, remove o lock + end_time = datetime.now() + elapsed_time = end_time - start_time + print(f'Total Execution Time: {elapsed_time}') - end_time = datetime.now() - elapsed_time = end_time - start_time - print(f"Total Execution Time: {elapsed_time}") -main() +if __name__ == '__main__': + main() From 364bb4a59aeea24250c4781b4a34a9f39708ec86 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Sun, 16 Mar 2025 20:10:32 -0300 Subject: [PATCH 05/18] test --- docker/install-dynamic-plugins.py | 655 ++++++++++-------------------- 1 file changed, 224 insertions(+), 431 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 9e32063b94..71890b6906 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -14,45 +14,22 @@ import atexit import time import signal +import threading +import concurrent.futures +import functools from datetime import datetime -from concurrent.futures import ThreadPoolExecutor, as_completed - -class PullPolicy(StrEnum): - IF_NOT_PRESENT = 'IfNotPresent' - ALWAYS = 'Always' - # NEVER = 'Never' not needed - -class InstallException(Exception): - """Exception class from which every exception in this library will derive.""" - pass - -RECOGNIZED_ALGORITHMS = ( - 'sha512', - 'sha384', - 'sha256', -) - -def merge(source, destination, prefix=''): - for key, value in source.items(): - if isinstance(value, dict): - node = destination.setdefault(key, {}) - merge(value, node, prefix + key + '.') - else: - if key in destination and destination[key] != value: - raise InstallException( - f"Config key '{prefix + key}' defined differently for 2 dynamic plugins" - ) - destination[key] = value - return destination -def maybeMergeConfig(config, globalConfig): - if config is not None and isinstance(config, dict): - print('\t==> Merging plugin-specific configuration', flush=True) - return merge(config, 
globalConfig) - return globalConfig -# ================== OCI DOWNLOADER ================== +def cache_results(func): + cache = {} + @functools.wraps(func) + def wrapper(*args, **kwargs): + if args not in cache: + cache[args] = func(*args, **kwargs) + return cache[args] + return wrapper + class OciDownloader: def __init__(self, destination: str): self._skopeo = shutil.which('skopeo') @@ -64,6 +41,7 @@ def __init__(self, destination: str): self.image_to_tarball = {} self.destination = destination + @cache_results def skopeo(self, command): rv = subprocess.run([self._skopeo] + command, check=True, capture_output=True) if rv.returncode != 0: @@ -73,16 +51,12 @@ def skopeo(self, command): def get_plugin_tar(self, image: str) -> str: if image not in self.image_to_tarball: print(f'\t==> Copying image {image} to local filesystem', flush=True) - image_digest = hashlib.sha256( - image.encode('utf-8'), usedforsecurity=False - ).hexdigest() + image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() local_dir = os.path.join(self.tmp_dir, image_digest) - # replace oci:// prefix with docker:// image_url = image.replace('oci://', 'docker://') self.skopeo(['copy', image_url, f'dir:{local_dir}']) manifest_path = os.path.join(local_dir, 'manifest.json') manifest = json.load(open(manifest_path)) - # get the first layer of the image layer = manifest['layers'][0]['digest'] (_sha, filename) = layer.split(':') local_path = os.path.join(local_dir, filename) @@ -93,32 +67,21 @@ def get_plugin_tar(self, image: str) -> str: def extract_plugin(self, tar_file: str, plugin_path: str) -> None: with tarfile.open(tar_file, 'r:gz') as tar: # NOSONAR filesToExtract = [] - max_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) for member in tar.getmembers(): if not member.name.startswith(plugin_path): continue - # zip bomb protection - if member.size > max_size: + if member.size > int(os.environ.get('MAX_ENTRY_SIZE', 20000000)): raise InstallException('Zip bomb 
detected in ' + member.name) - if member.islnk() or member.issym(): realpath = os.path.realpath(os.path.join(plugin_path, *os.path.split(member.linkname))) if not realpath.startswith(plugin_path): - print( - '\t==> WARNING: skipping file containing link outside of the archive: ' - f'{member.name} -> {member.linkpath}' - ) + print(f'\t==> WARNING: skipping file containing link outside of the archive: ' + member.name + ' -> ' + member.linkpath) continue - filesToExtract.append(member) tar.extractall(os.path.abspath(self.destination), members=filesToExtract, filter='tar') def download(self, package: str) -> str: - """ - Baixa a imagem e extrai apenas o diretório plugin_path. - Retorna o plugin_path. - """ - image, plugin_path = package.split('!') + (image, plugin_path) = package.split('!') tar_file = self.get_plugin_tar(image) plugin_directory = os.path.join(self.destination, plugin_path) if os.path.exists(plugin_directory): @@ -128,205 +91,89 @@ def download(self, package: str) -> str: return plugin_path def digest(self, package: str) -> str: - image, _ = package.split('!') + (image, plugin_path) = package.split('!') image_url = image.replace('oci://', 'docker://') output = self.skopeo(['inspect', image_url]) data = json.loads(output) - # OCI artifact digest field is "hashmethod:hash" digest = data['Digest'].split(':')[1] return f"{digest}" - -# ================== INTEGRITY CHECK ================== -def verify_package_integrity(plugin: dict, archive: str, working_directory: str) -> None: - package = plugin['package'] - if 'integrity' not in plugin: - raise InstallException(f'Package integrity for {package} is missing') - - integrity = plugin['integrity'] - if not isinstance(integrity, str): - raise InstallException(f'Package integrity for {package} must be a string') - - algorithm_hash = integrity.split('-') - if len(algorithm_hash) != 2: - raise InstallException( - f'Package integrity for {package} must be -' - ) - - algorithm, hash_digest = algorithm_hash - if 
algorithm not in RECOGNIZED_ALGORITHMS: - raise InstallException( - f'{package}: Provided Package integrity algorithm {algorithm} not supported. ' - f'Use one of: {RECOGNIZED_ALGORITHMS}' - ) - - try: - base64.b64decode(hash_digest, validate=True) - except binascii.Error: - raise InstallException( - f'{package}: Provided Package integrity hash {hash_digest} is not valid base64' - ) - - cat_process = subprocess.Popen(["cat", archive], stdout=subprocess.PIPE) - openssl_dgst_process = subprocess.Popen( - ["openssl", "dgst", "-" + algorithm, "-binary"], - stdin=cat_process.stdout, - stdout=subprocess.PIPE - ) - openssl_base64_process = subprocess.Popen( - ["openssl", "base64", "-A"], - stdin=openssl_dgst_process.stdout, - stdout=subprocess.PIPE - ) - - output, _ = openssl_base64_process.communicate() - result_hash = output.decode('utf-8').strip() - if hash_digest != result_hash: - raise InstallException( - f'{package}: Hash mismatch: {result_hash} != {hash_digest}' - ) - -# ================== LOCKING ================== -def create_lock(lock_file_path): - while True: - try: - with open(lock_file_path, 'x'): - print(f"======= Created lock file: {lock_file_path}") - return - except FileExistsError: - wait_for_lock_release(lock_file_path) - -def remove_lock(lock_file_path): - if os.path.exists(lock_file_path): - os.remove(lock_file_path) - print(f"======= Removed lock file: {lock_file_path}") - -def wait_for_lock_release(lock_file_path): - print(f"======= Waiting for lock release (file: {lock_file_path})...", flush=True) - while True: - if not os.path.exists(lock_file_path): - break - time.sleep(1) - print("======= Lock released.") - - -# ================== PLUGIN INSTALL LOGIC (ASYNC WRAPPER) ================== -def install_plugin(plugin, dynamicPluginsRoot, skipIntegrityCheck, maxEntrySize, plugin_path_by_hash, oci_downloader): - """ - Função chamada em paralelo para instalar *um* plugin: - - Baixa OCI ou NPM - - Faz verificação de integridade (se aplicável) - - Retorna 
o 'plugin_path' instalado e o pluginConfig. - """ +def install_plugin(plugin, dynamicPluginsRoot, globalConfig, plugin_path_by_hash, oci_downloader, skipIntegrityCheck, maxEntrySize): package = plugin['package'] - plugin_hash = plugin['hash'] - # Para sabermos se instalamos ou não esse plugin (caso skip for IF_NOT_PRESENT) - installed_plugin_path = None + if 'disabled' in plugin and plugin['disabled'] is True: + print('\n======= Skipping disabled dynamic plugin', package, flush=True) + return globalConfig - # 1) Verifica se é OCI ou NPM + plugin_path = '' if package.startswith('oci://'): - # Determina pull policy (se for :latest!, default = ALWAYS) - if ':latest!' in package: - default_policy = PullPolicy.ALWAYS - else: - default_policy = PullPolicy.IF_NOT_PRESENT - - pull_policy = plugin.get('pullPolicy', default_policy) - if isinstance(pull_policy, str): - pull_policy = PullPolicy(pull_policy) - - if plugin_hash in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: - # Pula download, já está instalado - print(f'\n======= Skipping download of already installed OCI plugin {package}', flush=True) - old_path = plugin_path_by_hash.pop(plugin_hash) - return old_path, plugin.get('pluginConfig') - - # Se pull_policy for ALWAYS, checa digest - if plugin_hash in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: - old_path = plugin_path_by_hash.pop(plugin_hash) - digest_file_path = os.path.join( - dynamicPluginsRoot, - old_path, - 'dynamic-plugin-image.hash' - ) - local_image_digest = None - if os.path.isfile(digest_file_path): - with open(digest_file_path, 'r') as f: - local_image_digest = f.read().strip() - - remote_digest = oci_downloader.digest(package) - if remote_digest == local_image_digest: - print(f'\n======= Skipping download of already installed OCI plugin {package}', flush=True) - return old_path, plugin.get('pluginConfig') + try: + pull_policy = plugin.get('pullPolicy', PullPolicy.ALWAYS if ':latest!' 
in package else PullPolicy.IF_NOT_PRESENT) + if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: + print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) + plugin_path_by_hash.pop(plugin['hash']) + return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: + digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path_by_hash.pop(plugin['hash']), 'dynamic-plugin-image.hash') + local_image_digest = None + if os.path.isfile(digest_file_path): + with open(digest_file_path, 'r') as digest_file: + digest_value = digest_file.read().strip() + local_image_digest = digest_value + remote_image_digest = oci_downloader.digest(package) + if remote_image_digest == local_image_digest: + print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) + return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + else: + print('\n======= Installing dynamic plugin', package, flush=True) else: - print(f'\n======= Installing dynamic plugin (OCI, updated digest) {package}', flush=True) - else: - print(f'\n======= Installing dynamic OCI plugin {package}', flush=True) - - # De fato faz download - installed_plugin_path = oci_downloader.download(package) - - # Salva o digest em um arquivo - digest_file_path = os.path.join( - dynamicPluginsRoot, - installed_plugin_path, - 'dynamic-plugin-image.hash' - ) - with open(digest_file_path, 'w') as f: - f.write(oci_downloader.digest(package)) - + print('\n======= Installing dynamic plugin', package, flush=True) + + plugin_path = oci_downloader.download(package) + digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') + with open(digest_file_path, 'w') as digest_file: + digest_file.write(oci_downloader.digest(package)) + for key in [k for k, v in plugin_path_by_hash.items() if v == plugin_path]: + 
plugin_path_by_hash.pop(key) + except Exception as e: + raise InstallException(f"Error while adding OCI plugin {package} to downloader: {e}") else: - # NPM plugin + plugin_already_installed = False pull_policy = plugin.get('pullPolicy', PullPolicy.IF_NOT_PRESENT) - if isinstance(pull_policy, str): - pull_policy = PullPolicy(pull_policy) - - # Se já tem hash e pullpolicy=IF_NOT_PRESENT => skip - if plugin_hash in plugin_path_by_hash: - old_path = plugin_path_by_hash.pop(plugin_hash) + if plugin['hash'] in plugin_path_by_hash: force_download = plugin.get('forceDownload', False) if pull_policy == PullPolicy.ALWAYS or force_download: - print(f'\n======= Forcing download of already installed NPM plugin {package}', flush=True) + print('\n======= Forcing download of already installed dynamic plugin', package, flush=True) else: - print(f'\n======= Skipping download of already installed NPM plugin {package}', flush=True) - return old_path, plugin.get('pluginConfig') + print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) + plugin_already_installed = True + plugin_path_by_hash.pop(plugin['hash']) else: - print(f'\n======= Installing dynamic NPM plugin {package}', flush=True) + print('\n======= Installing dynamic plugin', package, flush=True) + + if plugin_already_installed: + return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) package_is_local = package.startswith('./') - if (not package_is_local - and not skipIntegrityCheck - and 'integrity' not in plugin): - raise InstallException(f'No integrity hash provided for Package {package}') + if not package_is_local and not skipIntegrityCheck and not 'integrity' in plugin: + raise InstallException(f"No integrity hash provided for Package {package}") - # Ajusta se local if package_is_local: package = os.path.join(os.getcwd(), package[2:]) print('\t==> Grabbing package archive through `npm pack`', flush=True) - completed = subprocess.run( - ['npm', 'pack', package], - 
capture_output=True, - cwd=dynamicPluginsRoot - ) + completed = subprocess.run(['npm', 'pack', package], capture_output=True, cwd=dynamicPluginsRoot) if completed.returncode != 0: - raise InstallException( - f'Error while installing plugin {package} with npm pack: ' - + completed.stderr.decode('utf-8') - ) + raise InstallException(f'Error while installing plugin { package } with \'npm pack\' : ' + completed.stderr.decode('utf-8')) - archive = os.path.join( - dynamicPluginsRoot, completed.stdout.decode('utf-8').strip() - ) + archive = os.path.join(dynamicPluginsRoot, completed.stdout.decode('utf-8').strip()) - if not package_is_local and not skipIntegrityCheck: + if not (package_is_local or skipIntegrityCheck): print('\t==> Verifying package integrity', flush=True) verify_package_integrity(plugin, archive, dynamicPluginsRoot) - # Normalmente o nome do dir = , sem .tgz directory = archive.replace('.tgz', '') directoryRealpath = os.path.realpath(directory) - installed_plugin_path = os.path.basename(directoryRealpath) + plugin_path = os.path.basename(directoryRealpath) if os.path.exists(directory): print('\t==> Removing previous plugin directory', directory, flush=True) @@ -334,239 +181,185 @@ def install_plugin(plugin, dynamicPluginsRoot, skipIntegrityCheck, maxEntrySize, os.mkdir(directory) print('\t==> Extracting package archive', archive, flush=True) - with tarfile.open(archive, 'r:gz') as file: - for member in file.getmembers(): - if member.isreg(): - if not member.name.startswith('package/'): - raise InstallException( - f'NPM package archive does not start with \"package/\": {member.name}' - ) - - if member.size > maxEntrySize: - raise InstallException('Zip bomb detected in ' + member.name) - - # remove prefixo - member.name = member.name.removeprefix('package/') - file.extract(member, path=directory, filter='tar') - elif member.isdir(): - print('\t\tSkipping directory entry', member.name, flush=True) - elif member.islnk() or member.issym(): - if not 
member.linkpath.startswith('package/'): - raise InstallException( - f'NPM package link outside of archive: {member.name} -> {member.linkpath}' - ) - member.name = member.name.removeprefix('package/') - member.linkpath = member.linkpath.removeprefix('package/') - realpath = os.path.realpath( - os.path.join(directory, *os.path.split(member.linkname)) - ) - if not realpath.startswith(directoryRealpath): - raise InstallException( - f'NPM package link outside of archive: {member.name} -> {member.linkpath}' - ) - file.extract(member, path=directory, filter='tar') + file = tarfile.open(archive, 'r:gz') # NOSONAR + for member in file.getmembers(): + if member.isreg(): + if not member.name.startswith('package/'): + raise InstallException("NPM package archive archive does not start with 'package/' as it should: " + member.name) + if member.size > maxEntrySize: + raise InstallException('Zip bomb detected in ' + member.name) + member.name = member.name.removeprefix('package/') + file.extract(member, path=directory, filter='tar') + elif member.isdir(): + print('\t\tSkipping directory entry', member.name, flush=True) + elif member.islnk() or member.issym(): + if not member.linkpath.startswith('package/'): + raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath') + member.name = member.name.removeprefix('package/') + member.linkpath = member.linkpath.removeprefix('package/') + realpath = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) + if not realpath.startswith(directoryRealpath): + raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) + file.extract(member, path=directory, filter='tar') + else: + if member.type == tarfile.CHRTYPE: + type_str = "character device" + elif member.type == tarfile.BLKTYPE: + type_str = "block device" + elif member.type == tarfile.FIFOTYPE: + type_str = "FIFO" else: - # se for 
CHRTYPE, BLKTYPE, etc - raise InstallException( - f'NPM package archive contains special file: {member.name}' - ) + type_str = "unknown" + raise InstallException('NPM package archive contains a non regular file: ' + member.name + ' - ' + type_str) + file.close() print('\t==> Removing package archive', archive, flush=True) os.remove(archive) - # Independente de ser OCI ou NPM, grava dynamic-plugin-config.hash - hash_file_path = os.path.join(dynamicPluginsRoot, installed_plugin_path, 'dynamic-plugin-config.hash') + hash = plugin['hash'] + hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') with open(hash_file_path, 'w') as digest_file: - digest_file.write(plugin_hash) + digest_file.write(hash) - print('\t==> Successfully installed dynamic plugin', package, flush=True) - return installed_plugin_path, plugin.get('pluginConfig') + if 'pluginConfig' not in plugin: + print('\t==> Successfully installed dynamic plugin', package, flush=True) + return globalConfig + return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) -# ================== MAIN ================== def main(): - start_time = datetime.now() - - if len(sys.argv) < 2: - raise InstallException( - 'Usage: python script.py ' - ) + start_time = datetime.now() dynamicPluginsRoot = sys.argv[1] - - # Lock lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') atexit.register(remove_lock, lock_file_path) signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0)) create_lock(lock_file_path) - try: - maxEntrySize = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) - skipIntegrityCheck = os.environ.get("SKIP_INTEGRITY_CHECK", "").lower() == "true" + maxEntrySize = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) + skipIntegrityCheck = os.environ.get("SKIP_INTEGRITY_CHECK", "").lower() == "true" + dynamicPluginsFile = 'dynamic-plugins.yaml' + dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') 
+ + if not os.path.isfile(dynamicPluginsFile): + print(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") + with open(dynamicPluginsGlobalConfigFile, 'w') as file: + file.write('') + file.close() + exit(0) + + globalConfig = { + 'dynamicPlugins': { + 'rootDirectory': 'dynamic-plugins-root' + } + } - dynamicPluginsFile = 'dynamic-plugins.yaml' - dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') + with open(dynamicPluginsFile, 'r') as file: + content = yaml.safe_load(file) - if not os.path.isfile(dynamicPluginsFile): - print(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") - with open(dynamicPluginsGlobalConfigFile, 'w') as file: - file.write('') - return + if content == '' or content is None: + print(f"{dynamicPluginsFile} file is empty. Skipping dynamic plugins installation.") + with open(dynamicPluginsGlobalConfigFile, 'w') as file: + file.write('') + file.close() + exit(0) - with open(dynamicPluginsFile, 'r') as f: - content = yaml.safe_load(f) + if not isinstance(content, dict): + raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") - if not content: - print(f"{dynamicPluginsFile} file is empty. 
Skipping dynamic plugins installation.") - with open(dynamicPluginsGlobalConfigFile, 'w') as file: - file.write('') - return + allPlugins = {} + if skipIntegrityCheck: + print(f"SKIP_INTEGRITY_CHECK has been set to {skipIntegrityCheck}, skipping integrity check of packages") - if not isinstance(content, dict): - raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") + if 'includes' in content: + includes = content['includes'] + else: + includes = [] - if skipIntegrityCheck: - print(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping package integrity checks") + if not isinstance(includes, list): + raise InstallException(f"content of the \'includes\' field must be a list in {dynamicPluginsFile}") - globalConfig = { - 'dynamicPlugins': { - 'rootDirectory': 'dynamic-plugins-root' - } - } + for include in includes: + if not isinstance(include, str): + raise InstallException(f"content of the \'includes\' field must be a list of strings in {dynamicPluginsFile}") - allPlugins = {} - includes = content.get('includes', []) - if not isinstance(includes, list): - raise InstallException(f"content of 'includes' must be a list in {dynamicPluginsFile}") - - # Carrega plugins de includes - for include in includes: - if not isinstance(include, str): - raise InstallException(f"'includes' must contain string file paths, got {include}") - print('\n======= Including dynamic plugins from', include, flush=True) - - if not os.path.isfile(include): - raise InstallException(f"File {include} does not exist") - - with open(include, 'r') as incFile: - includeContent = yaml.safe_load(incFile) - - if not isinstance(includeContent, dict): - raise InstallException(f"{include} content must be a YAML object") - - includePlugins = includeContent.get('plugins', []) - if not isinstance(includePlugins, list): - raise InstallException(f"'plugins' must be a list in {include}") - - for plugin in includePlugins: - allPlugins[plugin['package']] = plugin - - # Carrega plugins do 
dynamic-plugins.yaml principal - plugins = content.get('plugins', []) - if not isinstance(plugins, list): - raise InstallException(f"'plugins' must be a list in {dynamicPluginsFile}") - - for plugin in plugins: - package = plugin['package'] - if package in allPlugins: - print('\n======= Overriding dynamic plugin configuration', package, flush=True) - for key, val in plugin.items(): - if key != 'package': - allPlugins[package][key] = val - else: - allPlugins[package] = plugin - - # Gera hash de cada plugin - for plugin in allPlugins.values(): - hash_dict = copy.deepcopy(plugin) - hash_dict.pop('pluginConfig', None) - plugin_hash = hashlib.sha256( - json.dumps(hash_dict, sort_keys=True).encode('utf-8') - ).hexdigest() - plugin['hash'] = plugin_hash - - # Lê plugins já instalados - plugin_path_by_hash = {} - for dir_name in os.listdir(dynamicPluginsRoot): - dir_path = os.path.join(dynamicPluginsRoot, dir_name) - if os.path.isdir(dir_path): - hash_file_path = os.path.join(dir_path, 'dynamic-plugin-config.hash') - if os.path.isfile(hash_file_path): - with open(hash_file_path, 'r') as hf: - hash_val = hf.read().strip() - plugin_path_by_hash[hash_val] = dir_name - - # Preparação do downloader (OCI) - oci_downloader = OciDownloader(dynamicPluginsRoot) - - # Filtra plugins habilitados - active_plugins = [] - for plugin in allPlugins.values(): - if plugin.get('disabled') is True: - print('\n======= Skipping disabled dynamic plugin', plugin['package'], flush=True) - else: - active_plugins.append(plugin) - - # ================================ - # FASE DE INSTALAÇÃO EM PARALELO - # ================================ - results = [] # lista de (pluginPathInstalado, pluginConfig) - exceptions = [] - - # Usamos 4 threads ou quantas forem (N) dependendo do seu cenário - # Se tiver muitas imagens, aumentar. Se tiver poucas, 4 ou 5 é suficiente. 
- with ThreadPoolExecutor(max_workers=4) as executor: - future_to_plugin = {} - for plugin in active_plugins: - future = executor.submit( - install_plugin, - plugin, - dynamicPluginsRoot, - skipIntegrityCheck, - maxEntrySize, - plugin_path_by_hash, - oci_downloader - ) - future_to_plugin[future] = plugin['package'] - - for future in as_completed(future_to_plugin): - pkg = future_to_plugin[future] - try: - installed_path, plugin_cfg = future.result() - # Guardamos o que precisamos para merges após as threads - results.append((installed_path, plugin_cfg)) - except Exception as exc: - exceptions.append((pkg, exc)) - - # Se houve exceção em alguma thread, exibimos e abortamos - if exceptions: - for pkg, exc in exceptions: - print(f'\n**** ERROR while installing plugin {pkg}: {exc}', flush=True) - # Caso deseje encerrar com código de erro, ou re-raise: - raise InstallException('One or more plugins failed to install.') - - # ================================ - # MERGE DE CONFIGs - # ================================ - for installed_path, plugin_cfg in results: - if plugin_cfg: - globalConfig = maybeMergeConfig(plugin_cfg, globalConfig) - - # Salva config global - yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) - - # Remove plugins que restaram em plugin_path_by_hash (significa que eles - # estavam instalados antes, mas não foram citados agora) - for outdated_hash, old_plugin_dir in plugin_path_by_hash.items(): - plugin_directory = os.path.join(dynamicPluginsRoot, old_plugin_dir) - print('\n======= Removing previously installed dynamic plugin', old_plugin_dir, flush=True) - shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + print('\n======= Including dynamic plugins from', include, flush=True) + if not os.path.isfile(include): + raise InstallException(f"File {include} does not exist") + + with open(include, 'r') as file: + includeContent = yaml.safe_load(file) - finally: - # Mesmo se der erro, remove o lock - end_time = 
datetime.now() - elapsed_time = end_time - start_time - print(f'Total Execution Time: {elapsed_time}') + if not isinstance(includeContent, dict): + raise InstallException(f"{include} content must be a YAML object") -if __name__ == '__main__': - main() + includePlugins = includeContent['plugins'] + if not isinstance(includePlugins, list): + raise InstallException(f"content of the \'plugins\' field must be a list in {include}") + + for plugin in includePlugins: + allPlugins[plugin['package']] = plugin + + if 'plugins' in content: + plugins = content['plugins'] + else: + plugins = [] + + if not isinstance(plugins, list): + raise InstallException(f"content of the \'plugins\' field must be a list in {dynamicPluginsFile}") + + for plugin in plugins: + package = plugin['package'] + if not isinstance(package, str): + raise InstallException(f"content of the \'plugins.package\' field must be a string in {dynamicPluginsFile}") + + if package not in allPlugins: + allPlugins[package] = plugin + continue + + print('\n======= Overriding dynamic plugin configuration', package, flush=True) + for key in plugin: + if key == 'package': + continue + allPlugins[package][key] = plugin[key] + + for plugin in allPlugins.values(): + hash_dict = copy.deepcopy(plugin) + hash_dict.pop('pluginConfig', None) + hash = hashlib.sha256(json.dumps(hash_dict, sort_keys=True).encode('utf-8')).hexdigest() + plugin['hash'] = hash + + plugin_path_by_hash = {} + for dir_name in os.listdir(dynamicPluginsRoot): + dir_path = os.path.join(dynamicPluginsRoot, dir_name) + if os.path.isdir(dir_path): + hash_file_path = os.path.join(dir_path, 'dynamic-plugin-config.hash') + if os.path.isfile(hash_file_path): + with open(hash_file_path, 'r') as hash_file: + hash_value = hash_file.read().strip() + plugin_path_by_hash[hash_value] = dir_name + + oci_downloader = OciDownloader(dynamicPluginsRoot) + + with concurrent.futures.ThreadPoolExecutor() as executor: + futures = [ + executor.submit( + install_plugin, plugin, 
dynamicPluginsRoot, globalConfig, plugin_path_by_hash, oci_downloader, skipIntegrityCheck, maxEntrySize + ) for plugin in allPlugins.values() + ] + for future in concurrent.futures.as_completed(futures): + globalConfig = future.result() + + yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) + + for hash_value in plugin_path_by_hash: + plugin_directory = os.path.join(dynamicPluginsRoot, plugin_path_by_hash[hash_value]) + print('\n======= Removing previously installed dynamic plugin', plugin_path_by_hash[hash_value], flush=True) + shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + +end_time = datetime.now() +elapsed_time = end_time - start_time +print(f'Total Execution Time: {elapsed_time}') + + +main() From 21e7e81e3d885b81b92e5473b1e25f4c38cbbd97 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 10:05:28 -0300 Subject: [PATCH 06/18] test --- docker/install-dynamic-plugins.py | 504 +++++++++++++++++++----------- 1 file changed, 314 insertions(+), 190 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 71890b6906..03636c2971 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -14,21 +14,47 @@ import atexit import time import signal -import threading -import concurrent.futures -import functools -from datetime import datetime +import logging + +# Configure logging +logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') + +class PullPolicy(StrEnum): + IF_NOT_PRESENT = 'IfNotPresent' + ALWAYS = 'Always' + # NEVER = 'Never' not needed + +class InstallException(Exception): + """Exception class from which every exception in this library will derive.""" + pass + +RECOGNIZED_ALGORITHMS = ( + 'sha512', + 'sha384', + 'sha256', +) + +def merge(source, destination, prefix = ''): + for key, value in source.items(): + if isinstance(value, dict): + # get node or create one + node = 
destination.setdefault(key, {}) + merge(value, node, key + '.') + else: + # if key exists in destination trigger an error + if key in destination and destination[key] != value: + raise InstallException(f"Config key '{ prefix + key }' defined differently for 2 dynamic plugins") + destination[key] = value + return destination -def cache_results(func): - cache = {} - @functools.wraps(func) - def wrapper(*args, **kwargs): - if args not in cache: - cache[args] = func(*args, **kwargs) - return cache[args] - return wrapper +def maybeMergeConfig(config, globalConfig): + if config is not None and isinstance(config, dict): + logging.info('\t==> Merging plugin-specific configuration') + return merge(config, globalConfig) + else: + return globalConfig class OciDownloader: def __init__(self, destination: str): @@ -40,23 +66,32 @@ def __init__(self, destination: str): self.tmp_dir = self.tmp_dir_obj.name self.image_to_tarball = {} self.destination = destination + self.cat_cmd = shutil.which('cat') + self.openssl_cmd = shutil.which('openssl') + + if not self.cat_cmd or not self.openssl_cmd: + raise InstallException("Required utilities 'cat' and 'openssl' not found in PATH.") - @cache_results def skopeo(self, command): - rv = subprocess.run([self._skopeo] + command, check=True, capture_output=True) + rv = subprocess.run([self._skopeo] + command, check=True, capture_output=True, text=True) if rv.returncode != 0: raise InstallException(f'Error while running skopeo command: {rv.stderr}') return rv.stdout def get_plugin_tar(self, image: str) -> str: if image not in self.image_to_tarball: - print(f'\t==> Copying image {image} to local filesystem', flush=True) + # run skopeo copy to copy the tar ball to the local filesystem + logging.info(f'\t==> Copying image {image} to local filesystem') image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() local_dir = os.path.join(self.tmp_dir, image_digest) + # replace oci:// prefix with docker:// image_url = 
image.replace('oci://', 'docker://') self.skopeo(['copy', image_url, f'dir:{local_dir}']) manifest_path = os.path.join(local_dir, 'manifest.json') - manifest = json.load(open(manifest_path)) + with open(manifest_path, 'r') as f: + manifest = json.load(f) + + # get the first layer of the image layer = manifest['layers'][0]['digest'] (_sha, filename) = layer.split(':') local_path = os.path.join(local_dir, filename) @@ -65,27 +100,33 @@ def get_plugin_tar(self, image: str) -> str: return self.image_to_tarball[image] def extract_plugin(self, tar_file: str, plugin_path: str) -> None: - with tarfile.open(tar_file, 'r:gz') as tar: # NOSONAR - filesToExtract = [] + extracted_path = os.path.abspath(self.destination) + with tarfile.open(tar_file, 'r:gz') as tar: + members = [] for member in tar.getmembers(): if not member.name.startswith(plugin_path): continue + if member.size > int(os.environ.get('MAX_ENTRY_SIZE', 20000000)): - raise InstallException('Zip bomb detected in ' + member.name) + raise InstallException(f'Zip bomb detected in {member.name}') + if member.islnk() or member.issym(): - realpath = os.path.realpath(os.path.join(plugin_path, *os.path.split(member.linkname))) - if not realpath.startswith(plugin_path): - print(f'\t==> WARNING: skipping file containing link outside of the archive: ' + member.name + ' -> ' + member.linkpath) + realpath = os.path.realpath(os.path.join(extracted_path, plugin_path, *os.path.split(member.linkname))) + if not realpath.startswith(extracted_path): + logging.warning(f'\t==> WARNING: skipping file containing link outside of the archive: {member.name} -> {member.linkpath}') continue - filesToExtract.append(member) - tar.extractall(os.path.abspath(self.destination), members=filesToExtract, filter='tar') + members.append(member) + + tar.extractall(extracted_path, members=members, filter='tar') + def download(self, package: str) -> str: + # split by ! 
to get the path in the image (image, plugin_path) = package.split('!') tar_file = self.get_plugin_tar(image) plugin_directory = os.path.join(self.destination, plugin_path) if os.path.exists(plugin_directory): - print('\t==> Removing previous plugin directory', plugin_directory, flush=True) + logging.info(f'\t==> Removing previous plugin directory {plugin_directory}') shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) self.extract_plugin(tar_file=tar_file, plugin_path=plugin_path) return plugin_path @@ -95,142 +136,81 @@ def digest(self, package: str) -> str: image_url = image.replace('oci://', 'docker://') output = self.skopeo(['inspect', image_url]) data = json.loads(output) + # OCI artifact digest field is defined as "hash method" ":" "hash" digest = data['Digest'].split(':')[1] return f"{digest}" -def install_plugin(plugin, dynamicPluginsRoot, globalConfig, plugin_path_by_hash, oci_downloader, skipIntegrityCheck, maxEntrySize): +def verify_package_integrity(plugin: dict, archive: str, working_directory: str, cat_cmd, openssl_cmd) -> None: package = plugin['package'] - if 'disabled' in plugin and plugin['disabled'] is True: - print('\n======= Skipping disabled dynamic plugin', package, flush=True) - return globalConfig - - plugin_path = '' - if package.startswith('oci://'): - try: - pull_policy = plugin.get('pullPolicy', PullPolicy.ALWAYS if ':latest!' 
in package else PullPolicy.IF_NOT_PRESENT) - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: - print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) - plugin_path_by_hash.pop(plugin['hash']) - return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: - digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path_by_hash.pop(plugin['hash']), 'dynamic-plugin-image.hash') - local_image_digest = None - if os.path.isfile(digest_file_path): - with open(digest_file_path, 'r') as digest_file: - digest_value = digest_file.read().strip() - local_image_digest = digest_value - remote_image_digest = oci_downloader.digest(package) - if remote_image_digest == local_image_digest: - print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) - return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - else: - print('\n======= Installing dynamic plugin', package, flush=True) - else: - print('\n======= Installing dynamic plugin', package, flush=True) - - plugin_path = oci_downloader.download(package) - digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') - with open(digest_file_path, 'w') as digest_file: - digest_file.write(oci_downloader.digest(package)) - for key in [k for k, v in plugin_path_by_hash.items() if v == plugin_path]: - plugin_path_by_hash.pop(key) - except Exception as e: - raise InstallException(f"Error while adding OCI plugin {package} to downloader: {e}") - else: - plugin_already_installed = False - pull_policy = plugin.get('pullPolicy', PullPolicy.IF_NOT_PRESENT) - if plugin['hash'] in plugin_path_by_hash: - force_download = plugin.get('forceDownload', False) - if pull_policy == PullPolicy.ALWAYS or force_download: - print('\n======= Forcing download of already installed dynamic plugin', package, flush=True) - 
else: - print('\n======= Skipping download of already installed dynamic plugin', package, flush=True) - plugin_already_installed = True - plugin_path_by_hash.pop(plugin['hash']) - else: - print('\n======= Installing dynamic plugin', package, flush=True) - - if plugin_already_installed: - return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - - package_is_local = package.startswith('./') - if not package_is_local and not skipIntegrityCheck and not 'integrity' in plugin: - raise InstallException(f"No integrity hash provided for Package {package}") - - if package_is_local: - package = os.path.join(os.getcwd(), package[2:]) - - print('\t==> Grabbing package archive through `npm pack`', flush=True) - completed = subprocess.run(['npm', 'pack', package], capture_output=True, cwd=dynamicPluginsRoot) - if completed.returncode != 0: - raise InstallException(f'Error while installing plugin { package } with \'npm pack\' : ' + completed.stderr.decode('utf-8')) - - archive = os.path.join(dynamicPluginsRoot, completed.stdout.decode('utf-8').strip()) - - if not (package_is_local or skipIntegrityCheck): - print('\t==> Verifying package integrity', flush=True) - verify_package_integrity(plugin, archive, dynamicPluginsRoot) - - directory = archive.replace('.tgz', '') - directoryRealpath = os.path.realpath(directory) - plugin_path = os.path.basename(directoryRealpath) - - if os.path.exists(directory): - print('\t==> Removing previous plugin directory', directory, flush=True) - shutil.rmtree(directory, ignore_errors=True, onerror=None) - os.mkdir(directory) - - print('\t==> Extracting package archive', archive, flush=True) - file = tarfile.open(archive, 'r:gz') # NOSONAR - for member in file.getmembers(): - if member.isreg(): - if not member.name.startswith('package/'): - raise InstallException("NPM package archive archive does not start with 'package/' as it should: " + member.name) - if member.size > maxEntrySize: - raise InstallException('Zip bomb detected in ' + 
member.name) - member.name = member.name.removeprefix('package/') - file.extract(member, path=directory, filter='tar') - elif member.isdir(): - print('\t\tSkipping directory entry', member.name, flush=True) - elif member.islnk() or member.issym(): - if not member.linkpath.startswith('package/'): - raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath') - member.name = member.name.removeprefix('package/') - member.linkpath = member.linkpath.removeprefix('package/') - realpath = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) - if not realpath.startswith(directoryRealpath): - raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) - file.extract(member, path=directory, filter='tar') - else: - if member.type == tarfile.CHRTYPE: - type_str = "character device" - elif member.type == tarfile.BLKTYPE: - type_str = "block device" - elif member.type == tarfile.FIFOTYPE: - type_str = "FIFO" - else: - type_str = "unknown" - raise InstallException('NPM package archive contains a non regular file: ' + member.name + ' - ' + type_str) - file.close() - - print('\t==> Removing package archive', archive, flush=True) - os.remove(archive) - - hash = plugin['hash'] - hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') - with open(hash_file_path, 'w') as digest_file: - digest_file.write(hash) - - if 'pluginConfig' not in plugin: - print('\t==> Successfully installed dynamic plugin', package, flush=True) - return globalConfig - - return maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + if 'integrity' not in plugin: + raise InstallException(f'Package integrity for {package} is missing') + + integrity = plugin['integrity'] + if not isinstance(integrity, str): + raise InstallException(f'Package integrity for {package} must be a string') + + integrity = 
integrity.split('-') + if len(integrity) != 2: + raise InstallException(f'Package integrity for {package} must be a string of the form -') + + algorithm = integrity[0] + if algorithm not in RECOGNIZED_ALGORITHMS: + raise InstallException(f'{package}: Provided Package integrity algorithm {algorithm} is not supported, please use one of following algorithms {RECOGNIZED_ALGORITHMS} instead') + + hash_digest = integrity[1] + try: + base64.b64decode(hash_digest, validate=True) + except binascii.Error: + raise InstallException(f'{package}: Provided Package integrity hash {hash_digest} is not a valid base64 encoding') + + # Use pre-validated commands from OciDownloader + cat_process = subprocess.Popen([cat_cmd, archive], stdout=subprocess.PIPE) + openssl_dgst_process = subprocess.Popen([openssl_cmd, "dgst", "-" + algorithm, "-binary"], stdin=cat_process.stdout, stdout=subprocess.PIPE) + openssl_base64_process = subprocess.Popen([openssl_cmd, "base64", "-A"], stdin=openssl_dgst_process.stdout, stdout=subprocess.PIPE) + + output, _ = openssl_base64_process.communicate() + if hash_digest != output.decode('utf-8').strip(): + raise InstallException(f'{package}: The hash of the downloaded package {output.decode("utf-8").strip()} does not match the provided integrity hash {hash_digest} provided in the configuration file') + +# Create the lock file, so that other instances of the script will wait for this one to finish +def create_lock(lock_file_path): + while True: + try: + with open(lock_file_path, 'x'): + logging.info(f"======= Created lock file: {lock_file_path}") + return + except FileExistsError: + wait_for_lock_release(lock_file_path) + +# Remove the lock file +def remove_lock(lock_file_path): + os.remove(lock_file_path) + logging.info(f"======= Removed lock file: {lock_file_path}") + +# Wait for the lock file to be released +def wait_for_lock_release(lock_file_path): + logging.info(f"======= Waiting for lock release (file: {lock_file_path})...") + while True: + if not 
os.path.exists(lock_file_path): + break + time.sleep(1) + logging.info("======= Lock released.") + +def load_yaml(file_path): + """Load YAML content from a file.""" + try: + with open(file_path, 'r') as file: + return yaml.safe_load(file) + except FileNotFoundError: + logging.warning(f"File not found: {file_path}") + return None + except yaml.YAMLError as e: + raise InstallException(f"Error parsing YAML file {file_path}: {e}") def main(): - start_time = datetime.now() dynamicPluginsRoot = sys.argv[1] + lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') atexit.register(remove_lock, lock_file_path) signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0)) @@ -238,27 +218,28 @@ def main(): maxEntrySize = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) skipIntegrityCheck = os.environ.get("SKIP_INTEGRITY_CHECK", "").lower() == "true" + dynamicPluginsFile = 'dynamic-plugins.yaml' dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') + # test if file dynamic-plugins.yaml exists if not os.path.isfile(dynamicPluginsFile): - print(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") + logging.info(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") with open(dynamicPluginsGlobalConfigFile, 'w') as file: file.write('') file.close() exit(0) globalConfig = { - 'dynamicPlugins': { + 'dynamicPlugins': { 'rootDirectory': 'dynamic-plugins-root' - } + } } - with open(dynamicPluginsFile, 'r') as file: - content = yaml.safe_load(file) + content = load_yaml(dynamicPluginsFile) if content == '' or content is None: - print(f"{dynamicPluginsFile} file is empty. Skipping dynamic plugins installation.") + logging.info(f"{dynamicPluginsFile} file is empty. 
Skipping dynamic plugins installation.") with open(dynamicPluginsGlobalConfigFile, 'w') as file: file.write('') file.close() @@ -268,13 +249,11 @@ def main(): raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") allPlugins = {} + if skipIntegrityCheck: - print(f"SKIP_INTEGRITY_CHECK has been set to {skipIntegrityCheck}, skipping integrity check of packages") + logging.info(f"SKIP_INTEGRITY_CHECK has been set to {skipIntegrityCheck}, skipping integrity check of packages") - if 'includes' in content: - includes = content['includes'] - else: - includes = [] + includes = content.get('includes', []) if not isinstance(includes, list): raise InstallException(f"content of the \'includes\' field must be a list in {dynamicPluginsFile}") @@ -283,27 +262,24 @@ def main(): if not isinstance(include, str): raise InstallException(f"content of the \'includes\' field must be a list of strings in {dynamicPluginsFile}") - print('\n======= Including dynamic plugins from', include, flush=True) - if not os.path.isfile(include): - raise InstallException(f"File {include} does not exist") + logging.info('\n======= Including dynamic plugins from %s', include) + + includeContent = load_yaml(include) - with open(include, 'r') as file: - includeContent = yaml.safe_load(file) + if includeContent is None: + continue if not isinstance(includeContent, dict): raise InstallException(f"{include} content must be a YAML object") - includePlugins = includeContent['plugins'] + includePlugins = includeContent.get('plugins', []) if not isinstance(includePlugins, list): raise InstallException(f"content of the \'plugins\' field must be a list in {include}") for plugin in includePlugins: allPlugins[plugin['package']] = plugin - if 'plugins' in content: - plugins = content['plugins'] - else: - plugins = [] + plugins = content.get('plugins', []) if not isinstance(plugins, list): raise InstallException(f"content of the \'plugins\' field must be a list in {dynamicPluginsFile}") @@ -313,22 
+289,27 @@ def main(): if not isinstance(package, str): raise InstallException(f"content of the \'plugins.package\' field must be a string in {dynamicPluginsFile}") + # if `package` already exists in `allPlugins`, then override its fields if package not in allPlugins: allPlugins[package] = plugin continue - print('\n======= Overriding dynamic plugin configuration', package, flush=True) + # override the included plugins with fields in the main plugins list + logging.info('\n======= Overriding dynamic plugin configuration %s', package) for key in plugin: if key == 'package': continue allPlugins[package][key] = plugin[key] + # add a hash for each plugin configuration to detect changes for plugin in allPlugins.values(): hash_dict = copy.deepcopy(plugin) + # remove elements that shouldn't be tracked for installation detection hash_dict.pop('pluginConfig', None) hash = hashlib.sha256(json.dumps(hash_dict, sort_keys=True).encode('utf-8')).hexdigest() plugin['hash'] = hash + # create a dict of all currently installed plugins in dynamicPluginsRoot plugin_path_by_hash = {} for dir_name in os.listdir(dynamicPluginsRoot): dir_path = os.path.join(dynamicPluginsRoot, dir_name) @@ -341,25 +322,168 @@ def main(): oci_downloader = OciDownloader(dynamicPluginsRoot) - with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [ - executor.submit( - install_plugin, plugin, dynamicPluginsRoot, globalConfig, plugin_path_by_hash, oci_downloader, skipIntegrityCheck, maxEntrySize - ) for plugin in allPlugins.values() - ] - for future in concurrent.futures.as_completed(futures): - globalConfig = future.result() + # iterate through the list of plugins + for plugin in allPlugins.values(): + package = plugin['package'] + + if 'disabled' in plugin and plugin['disabled'] is True: + logging.info('\n======= Skipping disabled dynamic plugin %s', package) + continue + + # Stores the relative path of the plugin directory once downloaded + plugin_path = '' + if package.startswith('oci://'): 
+ # The OCI downloader + try: + pull_policy = plugin.get('pullPolicy', PullPolicy.ALWAYS if ':latest!' in package else PullPolicy.IF_NOT_PRESENT) + + if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: + logging.info('\n======= Skipping download of already installed dynamic plugin %s', package) + plugin_path_by_hash.pop(plugin['hash']) + globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + continue + + if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: + digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path_by_hash.pop(plugin['hash']), 'dynamic-plugin-image.hash') + local_image_digest = None + if os.path.isfile(digest_file_path): + with open(digest_file_path, 'r') as digest_file: + digest_value = digest_file.read().strip() + local_image_digest = digest_value + remote_image_digest = oci_downloader.digest(package) + if remote_image_digest == local_image_digest: + logging.info('\n======= Skipping download of already installed dynamic plugin %s', package) + globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + continue + else: + logging.info('\n======= Installing dynamic plugin %s', package) + + else: + logging.info('\n======= Installing dynamic plugin %s', package) + + plugin_path = oci_downloader.download(package) + digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') + with open(digest_file_path, 'w') as digest_file: + digest_file.write(oci_downloader.digest(package)) + # remove any duplicate hashes which can occur when only the version is updated + for key in [k for k, v in plugin_path_by_hash.items() if v == plugin_path]: + plugin_path_by_hash.pop(key) + except Exception as e: + raise InstallException(f"Error while adding OCI plugin {package} to downloader: {e}") + else: + # The NPM downloader + plugin_already_installed = False + pull_policy = plugin.get('pullPolicy', PullPolicy.IF_NOT_PRESENT) + + if 
plugin['hash'] in plugin_path_by_hash: + force_download = plugin.get('forceDownload', False) + if pull_policy == PullPolicy.ALWAYS or force_download: + logging.info('\n======= Forcing download of already installed dynamic plugin %s', package) + else: + logging.info('\n======= Skipping download of already installed dynamic plugin %s', package) + plugin_already_installed = True + # remove the hash from plugin_path_by_hash so that we can detect plugins that have been removed + plugin_path_by_hash.pop(plugin['hash']) + else: + logging.info('\n======= Installing dynamic plugin %s', package) + + if plugin_already_installed: + globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + continue + + package_is_local = package.startswith('./') + + # If package is not local, then integrity check is mandatory + if not package_is_local and not skipIntegrityCheck and not 'integrity' in plugin: + raise InstallException(f"No integrity hash provided for Package {package}") + + if package_is_local: + package = os.path.join(os.getcwd(), package[2:]) + + logging.info('\t==> Grabbing package archive through `npm pack`') + completed = subprocess.run(['npm', 'pack', package], capture_output=True, cwd=dynamicPluginsRoot, text=True) + if completed.returncode != 0: + raise InstallException(f'Error while installing plugin { package } with \'npm pack\' : ' + completed.stderr) + + archive = os.path.join(dynamicPluginsRoot, completed.stdout.strip()) + + if not (package_is_local or skipIntegrityCheck): + logging.info('\t==> Verifying package integrity') + verify_package_integrity(plugin, archive, dynamicPluginsRoot, oci_downloader.cat_cmd, oci_downloader.openssl_cmd) + + directory = archive.replace('.tgz', '') + directoryRealpath = os.path.realpath(directory) + plugin_path = os.path.basename(directoryRealpath) + + if os.path.exists(directory): + logging.info('\t==> Removing previous plugin directory %s', directory) + shutil.rmtree(directory, ignore_errors=True, onerror=None) + 
os.mkdir(directory) + + logging.info('\t==> Extracting package archive %s', archive) + with tarfile.open(archive, 'r:gz') as file: + # extract the archive content but take care of zip bombs + for member in file.getmembers(): + if member.isreg(): + if not member.name.startswith('package/'): + raise InstallException("NPM package archive archive does not start with 'package/' as it should: " + member.name) + + if member.size > maxEntrySize: + raise InstallException('Zip bomb detected in ' + member.name) + + member.name = member.name.removeprefix('package/') + file.extract(member, path=directory, filter='tar') + elif member.isdir(): + logging.info('\t\tSkipping directory entry %s', member.name) + elif member.islnk() or member.issym(): + if not member.linkpath.startswith('package/'): + raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) + + member.name = member.name.removeprefix('package/') + member.linkpath = member.linkpath.removeprefix('package/') + + realpath = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) + if not realpath.startswith(directoryRealpath): + raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) + + file.extract(member, path=directory, filter='tar') + else: + if member.type == tarfile.CHRTYPE: + type_str = "character device" + elif member.type == tarfile.BLKTYPE: + type_str = "block device" + elif member.type == tarfile.FIFOTYPE: + type_str = "FIFO" + else: + type_str = "unknown" + + raise InstallException('NPM package archive contains a non regular file: ' + member.name + ' - ' + type_str) + + logging.info('\t==> Removing package archive %s', archive) + os.remove(archive) + + # create a hash file in the plugin directory + hash = plugin['hash'] + hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') + with open(hash_file_path, 'w') as 
digest_file: + digest_file.write(hash) + + if 'pluginConfig' not in plugin: + logging.info('\t==> Successfully installed dynamic plugin %s', package) + continue + + # if some plugin configuration is defined, merge it with the global configuration + globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + + logging.info('\t==> Successfully installed dynamic plugin %s', package) yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) + # remove plugins that have been removed from the configuration for hash_value in plugin_path_by_hash: plugin_directory = os.path.join(dynamicPluginsRoot, plugin_path_by_hash[hash_value]) - print('\n======= Removing previously installed dynamic plugin', plugin_path_by_hash[hash_value], flush=True) + logging.info('\n======= Removing previously installed dynamic plugin %s', plugin_path_by_hash[hash_value]) shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) -end_time = datetime.now() -elapsed_time = end_time - start_time -print(f'Total Execution Time: {elapsed_time}') - - -main() +if __name__ == "__main__": + main() From 3a4c6ce7ec621e53d98711db04e3c430af7c88e6 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 11:09:34 -0300 Subject: [PATCH 07/18] test --- docker/install-dynamic-plugins.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 03636c2971..f9453f9b00 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -15,6 +15,7 @@ import time import signal import logging +from datetime import datetime # Configure logging logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') @@ -209,6 +210,7 @@ def load_yaml(file_path): def main(): + start_time = datetime.now() dynamicPluginsRoot = sys.argv[1] lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') @@ -485,5 +487,9 @@ def main(): 
logging.info('\n======= Removing previously installed dynamic plugin %s', plugin_path_by_hash[hash_value]) shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + end_time = datetime.now() + elapsed_time = end_time - start_time + print(f"Total Execution Time: {elapsed_time}") + if __name__ == "__main__": main() From 70d1555712ba6693dc9c100e19ad34bf763fecb4 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 13:41:21 -0300 Subject: [PATCH 08/18] test --- docker/install-dynamic-plugins.py | 199 ++++++++++++++++++++---------- 1 file changed, 135 insertions(+), 64 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index f9453f9b00..50f3990270 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -26,7 +26,7 @@ class PullPolicy(StrEnum): # NEVER = 'Never' not needed class InstallException(Exception): - """Exception class from which every exception in this library will derive.""" + """Base exception for all custom exceptions in this script.""" pass RECOGNIZED_ALGORITHMS = ( @@ -35,29 +35,37 @@ class InstallException(Exception): 'sha256', ) -def merge(source, destination, prefix = ''): +def merge(source, destination, prefix=''): + """ + Recursively merges the 'source' dictionary into 'destination'. + Raises an InstallException if a conflicting key/value pair is found. + """ for key, value in source.items(): if isinstance(value, dict): - # get node or create one node = destination.setdefault(key, {}) - merge(value, node, key + '.') + merge(value, node, prefix + key + '.') else: - # if key exists in destination trigger an error if key in destination and destination[key] != value: - raise InstallException(f"Config key '{ prefix + key }' defined differently for 2 dynamic plugins") - + raise InstallException( + f"Config key '{prefix + key}' is defined differently in two dynamic plugins." 
+ ) destination[key] = value - return destination def maybeMergeConfig(config, globalConfig): + """ + If 'config' is a dict, merges it into 'globalConfig'. + Otherwise, returns 'globalConfig' unchanged. + """ if config is not None and isinstance(config, dict): logging.info('\t==> Merging plugin-specific configuration') return merge(config, globalConfig) - else: - return globalConfig + return globalConfig class OciDownloader: + """ + Handles downloading and extracting plugins stored in OCI registries (via 'skopeo'). + """ def __init__(self, destination: str): self._skopeo = shutil.which('skopeo') if self._skopeo is None: @@ -67,81 +75,117 @@ def __init__(self, destination: str): self.tmp_dir = self.tmp_dir_obj.name self.image_to_tarball = {} self.destination = destination - self.cat_cmd = shutil.which('cat') - self.openssl_cmd = shutil.which('openssl') - if not self.cat_cmd or not self.openssl_cmd: - raise InstallException("Required utilities 'cat' and 'openssl' not found in PATH.") + self.openssl_cmd = shutil.which('openssl') + if not self.openssl_cmd: + raise InstallException("Required utility 'openssl' not found in PATH.") def skopeo(self, command): - rv = subprocess.run([self._skopeo] + command, check=True, capture_output=True, text=True) + """ + Executes a 'skopeo' command and returns its stdout as a string. + Raises InstallException on failure. + """ + rv = subprocess.run( + [self._skopeo] + command, + check=True, + capture_output=True, + text=True + ) if rv.returncode != 0: raise InstallException(f'Error while running skopeo command: {rv.stderr}') return rv.stdout def get_plugin_tar(self, image: str) -> str: + """ + Downloads the specified OCI image (if not already downloaded) using skopeo + and returns the local path to the tar file. 
+ """ if image not in self.image_to_tarball: - # run skopeo copy to copy the tar ball to the local filesystem logging.info(f'\t==> Copying image {image} to local filesystem') image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() + local_dir = os.path.join(self.tmp_dir, image_digest) - # replace oci:// prefix with docker:// image_url = image.replace('oci://', 'docker://') self.skopeo(['copy', image_url, f'dir:{local_dir}']) + manifest_path = os.path.join(local_dir, 'manifest.json') with open(manifest_path, 'r') as f: manifest = json.load(f) - # get the first layer of the image + # Retrieves the first layer from the manifest layer = manifest['layers'][0]['digest'] - (_sha, filename) = layer.split(':') + _, filename = layer.split(':') local_path = os.path.join(local_dir, filename) self.image_to_tarball[image] = local_path return self.image_to_tarball[image] def extract_plugin(self, tar_file: str, plugin_path: str) -> None: + """ + Extracts only files under 'plugin_path' from 'tar_file'. Also performs + size checks and symlink verifications to protect against zip bombs or link escapes. 
+ """ extracted_path = os.path.abspath(self.destination) + max_entry_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) + with tarfile.open(tar_file, 'r:gz') as tar: members = [] for member in tar.getmembers(): if not member.name.startswith(plugin_path): continue - if member.size > int(os.environ.get('MAX_ENTRY_SIZE', 20000000)): + if member.size > max_entry_size: raise InstallException(f'Zip bomb detected in {member.name}') if member.islnk() or member.issym(): - realpath = os.path.realpath(os.path.join(extracted_path, plugin_path, *os.path.split(member.linkname))) + realpath = os.path.realpath( + os.path.join(extracted_path, plugin_path, *os.path.split(member.linkname)) + ) if not realpath.startswith(extracted_path): - logging.warning(f'\t==> WARNING: skipping file containing link outside of the archive: {member.name} -> {member.linkpath}') + logging.warning( + f'\t==> WARNING: skipping file containing link outside of the archive: ' + f'{member.name} -> {member.linkpath}' + ) continue members.append(member) tar.extractall(extracted_path, members=members, filter='tar') - def download(self, package: str) -> str: - # split by ! to get the path in the image - (image, plugin_path) = package.split('!') + """ + Receives something like 'oci://repo/image!path_in_tar', downloads and extracts only + the 'path_in_tar' directory into self.destination. Returns 'plugin_path'. + """ + image, plugin_path = package.split('!') tar_file = self.get_plugin_tar(image) + plugin_directory = os.path.join(self.destination, plugin_path) if os.path.exists(plugin_directory): logging.info(f'\t==> Removing previous plugin directory {plugin_directory}') shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + self.extract_plugin(tar_file=tar_file, plugin_path=plugin_path) return plugin_path def digest(self, package: str) -> str: - (image, plugin_path) = package.split('!') + """ + Returns the digest of the OCI artifact using 'skopeo inspect'. 
+ """ + image, _ = package.split('!') image_url = image.replace('oci://', 'docker://') output = self.skopeo(['inspect', image_url]) data = json.loads(output) - # OCI artifact digest field is defined as "hash method" ":" "hash" + # For example: 'Digest': 'sha256:3a033c...' digest = data['Digest'].split(':')[1] return f"{digest}" -def verify_package_integrity(plugin: dict, archive: str, working_directory: str, cat_cmd, openssl_cmd) -> None: +def verify_package_integrity(plugin: dict, archive: str, working_directory: str, openssl_cmd: str) -> None: + """ + Verifies the integrity of the specified 'archive' based on plugin['integrity'], + which must be of the form -. + Uses: openssl dgst - -binary | openssl base64 -A + Compares the calculated base64 hash to the one provided in the plugin definition. + """ package = plugin['package'] if 'integrity' not in plugin: raise InstallException(f'Package integrity for {package} is missing') @@ -150,55 +194,82 @@ def verify_package_integrity(plugin: dict, archive: str, working_directory: str, if not isinstance(integrity, str): raise InstallException(f'Package integrity for {package} must be a string') - integrity = integrity.split('-') - if len(integrity) != 2: - raise InstallException(f'Package integrity for {package} must be a string of the form -') + parts = integrity.split('-') + if len(parts) != 2: + raise InstallException( + f'Package integrity for {package} must be -' + ) - algorithm = integrity[0] + algorithm, hash_digest = parts if algorithm not in RECOGNIZED_ALGORITHMS: - raise InstallException(f'{package}: Provided Package integrity algorithm {algorithm} is not supported, please use one of following algorithms {RECOGNIZED_ALGORITHMS} instead') + raise InstallException( + f'{package}: Provided Package integrity algorithm {algorithm} is not supported. 
' + f'Use one of: {RECOGNIZED_ALGORITHMS}' + ) - hash_digest = integrity[1] try: - base64.b64decode(hash_digest, validate=True) + base64.b64decode(hash_digest, validate=True) except binascii.Error: - raise InstallException(f'{package}: Provided Package integrity hash {hash_digest} is not a valid base64 encoding') + raise InstallException( + f'{package}: The provided hash {hash_digest} is not valid base64' + ) + + # Instead of using 'cat', we open the file in Python and pipe its contents to openssl + with open(archive, 'rb') as archive_file: + # Equivalent to: cat archive | openssl dgst - -binary | openssl base64 -A + openssl_dgst_process = subprocess.Popen( + [openssl_cmd, 'dgst', f'-{algorithm}', '-binary'], + stdin=archive_file, + stdout=subprocess.PIPE + ) + openssl_base64_process = subprocess.Popen( + [openssl_cmd, 'base64', '-A'], + stdin=openssl_dgst_process.stdout, + stdout=subprocess.PIPE + ) + + output, _ = openssl_base64_process.communicate() + calculated_hash = output.decode('utf-8').strip() + + if hash_digest != calculated_hash: + raise InstallException( + f'{package}: The archive hash {calculated_hash} does not match the integrity hash {hash_digest}' + ) - # Use pre-validated commands from OciDownloader - cat_process = subprocess.Popen([cat_cmd, archive], stdout=subprocess.PIPE) - openssl_dgst_process = subprocess.Popen([openssl_cmd, "dgst", "-" + algorithm, "-binary"], stdin=cat_process.stdout, stdout=subprocess.PIPE) - openssl_base64_process = subprocess.Popen([openssl_cmd, "base64", "-A"], stdin=openssl_dgst_process.stdout, stdout=subprocess.PIPE) - - output, _ = openssl_base64_process.communicate() - if hash_digest != output.decode('utf-8').strip(): - raise InstallException(f'{package}: The hash of the downloaded package {output.decode("utf-8").strip()} does not match the provided integrity hash {hash_digest} provided in the configuration file') - -# Create the lock file, so that other instances of the script will wait for this one to finish def 
create_lock(lock_file_path): + """ + Creates a lock file. If the file already exists, waits until it is released. + """ while True: - try: - with open(lock_file_path, 'x'): - logging.info(f"======= Created lock file: {lock_file_path}") - return - except FileExistsError: - wait_for_lock_release(lock_file_path) - -# Remove the lock file + try: + with open(lock_file_path, 'x'): + logging.info(f"======= Created lock file: {lock_file_path}") + return + except FileExistsError: + wait_for_lock_release(lock_file_path) + def remove_lock(lock_file_path): - os.remove(lock_file_path) - logging.info(f"======= Removed lock file: {lock_file_path}") + """ + Removes the lock file if it exists. + """ + if os.path.exists(lock_file_path): + os.remove(lock_file_path) + logging.info(f"======= Removed lock file: {lock_file_path}") -# Wait for the lock file to be released def wait_for_lock_release(lock_file_path): - logging.info(f"======= Waiting for lock release (file: {lock_file_path})...") - while True: - if not os.path.exists(lock_file_path): - break - time.sleep(1) - logging.info("======= Lock released.") + """ + Waits for the specified lock file to be removed, indicating that another process has finished. + """ + logging.info(f"======= Waiting for lock release (file: {lock_file_path})...") + while os.path.exists(lock_file_path): + time.sleep(1) + logging.info("======= Lock released.") def load_yaml(file_path): - """Load YAML content from a file.""" + """ + Loads YAML content from 'file_path'. Returns None if the file does not exist. + Raises InstallException if there's a parsing error. 
+ """ try: with open(file_path, 'r') as file: return yaml.safe_load(file) From 0ac96a554201f7d56cf915f79d08bcc946d4d099 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 13:42:32 -0300 Subject: [PATCH 09/18] test --- docker/install-dynamic-plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 50f3990270..16379ca17b 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -560,7 +560,7 @@ def main(): end_time = datetime.now() elapsed_time = end_time - start_time - print(f"Total Execution Time: {elapsed_time}") + print(f"Total Execution Timee: {elapsed_time}") if __name__ == "__main__": main() From 2559eb7e82c2c33d65299220aff0531f95deff10 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 14:41:27 -0300 Subject: [PATCH 10/18] test --- docker/install-dynamic-plugins.py | 470 ++++++++++++++++-------------- 1 file changed, 249 insertions(+), 221 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 16379ca17b..a9d6890d54 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -17,16 +17,19 @@ import logging from datetime import datetime -# Configure logging +# Configuração básica de logging logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') +# ------------------------------------------------------------------------------ +# Definições de Classes e Constantes +# ------------------------------------------------------------------------------ class PullPolicy(StrEnum): IF_NOT_PRESENT = 'IfNotPresent' ALWAYS = 'Always' # NEVER = 'Never' not needed class InstallException(Exception): - """Base exception for all custom exceptions in this script.""" + """Exceção base para erros neste script.""" pass RECOGNIZED_ALGORITHMS = ( @@ -35,10 +38,13 @@ class InstallException(Exception): 'sha256', ) +# 
------------------------------------------------------------------------------ +# Funções Auxiliares +# ------------------------------------------------------------------------------ def merge(source, destination, prefix=''): """ - Recursively merges the 'source' dictionary into 'destination'. - Raises an InstallException if a conflicting key/value pair is found. + Faz merge recursivo do dicionário 'source' em 'destination'. + Se encontrar chave com valor conflitante, lança InstallException. """ for key, value in source.items(): if isinstance(value, dict): @@ -47,25 +53,74 @@ def merge(source, destination, prefix=''): else: if key in destination and destination[key] != value: raise InstallException( - f"Config key '{prefix + key}' is defined differently in two dynamic plugins." + f"Config key '{prefix + key}' definida de forma diferente em plugins distintos." ) destination[key] = value return destination def maybeMergeConfig(config, globalConfig): """ - If 'config' is a dict, merges it into 'globalConfig'. - Otherwise, returns 'globalConfig' unchanged. + Se 'config' for dict, faz merge no 'globalConfig'; caso contrário, retorna 'globalConfig' inalterado. """ if config is not None and isinstance(config, dict): logging.info('\t==> Merging plugin-specific configuration') return merge(config, globalConfig) return globalConfig -class OciDownloader: +# ------------------------------------------------------------------------------ +# Funções de Lock +# ------------------------------------------------------------------------------ +def create_lock(lock_file_path): + """ + Cria arquivo de lock. Se já existir, aguarda até ele ser liberado. + """ + while True: + try: + with open(lock_file_path, 'x'): + logging.info(f"======= Created lock file: {lock_file_path}") + return + except FileExistsError: + wait_for_lock_release(lock_file_path) + +def remove_lock(lock_file_path): + """ + Remove o lock file, se existir. 
+ """ + if os.path.exists(lock_file_path): + os.remove(lock_file_path) + logging.info(f"======= Removed lock file: {lock_file_path}") + +def wait_for_lock_release(lock_file_path): + """ + Fica em loop até o arquivo de lock ser removido, indicando que outro processo concluiu. + """ + logging.info(f"======= Waiting for lock release (file: {lock_file_path})...") + while os.path.exists(lock_file_path): + time.sleep(1) + logging.info("======= Lock released.") + +# ------------------------------------------------------------------------------ +# Função para carregar YAML +# ------------------------------------------------------------------------------ +def load_yaml(file_path): """ - Handles downloading and extracting plugins stored in OCI registries (via 'skopeo'). + Carrega o conteúdo YAML de 'file_path'. + Retorna None se o arquivo não existir. + Lança InstallException em caso de erros de parsing. """ + if not os.path.isfile(file_path): + logging.warning(f"File not found: {file_path}") + return None + try: + with open(file_path, 'r') as file: + return yaml.safe_load(file) + except yaml.YAMLError as e: + raise InstallException(f"Error parsing YAML file {file_path}: {e}") + +# ------------------------------------------------------------------------------ +# Classe para lidar com download via OCI (skopeo) +# ------------------------------------------------------------------------------ +class OciDownloader: def __init__(self, destination: str): self._skopeo = shutil.which('skopeo') if self._skopeo is None: @@ -82,23 +137,16 @@ def __init__(self, destination: str): def skopeo(self, command): """ - Executes a 'skopeo' command and returns its stdout as a string. - Raises InstallException on failure. + Executa 'skopeo' com os argumentos especificados e retorna stdout como string. 
""" - rv = subprocess.run( - [self._skopeo] + command, - check=True, - capture_output=True, - text=True - ) + rv = subprocess.run([self._skopeo] + command, check=True, capture_output=True, text=True) if rv.returncode != 0: raise InstallException(f'Error while running skopeo command: {rv.stderr}') return rv.stdout def get_plugin_tar(self, image: str) -> str: """ - Downloads the specified OCI image (if not already downloaded) using skopeo - and returns the local path to the tar file. + Faz o download da imagem (se ainda não feito), usando skopeo, e retorna o caminho local ao tar. """ if image not in self.image_to_tarball: logging.info(f'\t==> Copying image {image} to local filesystem') @@ -112,9 +160,8 @@ def get_plugin_tar(self, image: str) -> str: with open(manifest_path, 'r') as f: manifest = json.load(f) - # Retrieves the first layer from the manifest - layer = manifest['layers'][0]['digest'] - _, filename = layer.split(':') + layer_digest = manifest['layers'][0]['digest'] + _, filename = layer_digest.split(':') local_path = os.path.join(local_dir, filename) self.image_to_tarball[image] = local_path @@ -122,11 +169,11 @@ def get_plugin_tar(self, image: str) -> str: def extract_plugin(self, tar_file: str, plugin_path: str) -> None: """ - Extracts only files under 'plugin_path' from 'tar_file'. Also performs - size checks and symlink verifications to protect against zip bombs or link escapes. + Extrai apenas arquivos que começam com 'plugin_path' do tar.gz, verificando tamanho (anti zip-bomb) + e possíveis links fora do escopo. 
""" extracted_path = os.path.abspath(self.destination) - max_entry_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) + max_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) with tarfile.open(tar_file, 'r:gz') as tar: members = [] @@ -134,7 +181,7 @@ def extract_plugin(self, tar_file: str, plugin_path: str) -> None: if not member.name.startswith(plugin_path): continue - if member.size > max_entry_size: + if member.size > max_size: raise InstallException(f'Zip bomb detected in {member.name}') if member.islnk() or member.issym(): @@ -153,8 +200,8 @@ def extract_plugin(self, tar_file: str, plugin_path: str) -> None: def download(self, package: str) -> str: """ - Receives something like 'oci://repo/image!path_in_tar', downloads and extracts only - the 'path_in_tar' directory into self.destination. Returns 'plugin_path'. + Recebe algo como 'oci://repo/img!path_no_tar' e extrai só o diretório path_no_tar no destino. + Retorna plugin_path. """ image, plugin_path = package.split('!') tar_file = self.get_plugin_tar(image) @@ -169,124 +216,83 @@ def download(self, package: str) -> str: def digest(self, package: str) -> str: """ - Returns the digest of the OCI artifact using 'skopeo inspect'. + Retorna o digest da imagem OCI usando 'skopeo inspect'. """ image, _ = package.split('!') image_url = image.replace('oci://', 'docker://') output = self.skopeo(['inspect', image_url]) data = json.loads(output) - # For example: 'Digest': 'sha256:3a033c...' - digest = data['Digest'].split(':')[1] - return f"{digest}" + # Ex.: 'Digest': 'sha256:3a033c...' 
+ return data['Digest'].split(':')[1] +# ------------------------------------------------------------------------------ +# Verificação de Integridade +# ------------------------------------------------------------------------------ def verify_package_integrity(plugin: dict, archive: str, working_directory: str, openssl_cmd: str) -> None: """ - Verifies the integrity of the specified 'archive' based on plugin['integrity'], - which must be of the form -. - Uses: openssl dgst - -binary | openssl base64 -A - Compares the calculated base64 hash to the one provided in the plugin definition. + Verifica integridade do arquivo 'archive' usando plugin['integrity'] no formato -. """ package = plugin['package'] - if 'integrity' not in plugin: + integrity = plugin.get('integrity') + if not integrity: raise InstallException(f'Package integrity for {package} is missing') - integrity = plugin['integrity'] if not isinstance(integrity, str): raise InstallException(f'Package integrity for {package} must be a string') parts = integrity.split('-') if len(parts) != 2: raise InstallException( - f'Package integrity for {package} must be -' + f'Package integrity for {package} must be -' ) - algorithm, hash_digest = parts + algorithm, base64_digest = parts if algorithm not in RECOGNIZED_ALGORITHMS: raise InstallException( f'{package}: Provided Package integrity algorithm {algorithm} is not supported. ' - f'Use one of: {RECOGNIZED_ALGORITHMS}' + f'Use one of {RECOGNIZED_ALGORITHMS}.' 
) try: - base64.b64decode(hash_digest, validate=True) + base64.b64decode(base64_digest, validate=True) except binascii.Error: raise InstallException( - f'{package}: The provided hash {hash_digest} is not valid base64' + f'{package}: Provided Package integrity hash {base64_digest} is not valid base64' ) - # Instead of using 'cat', we open the file in Python and pipe its contents to openssl + # Lê o arquivo em Python (sem usar 'cat') e passa ao openssl with open(archive, 'rb') as archive_file: - # Equivalent to: cat archive | openssl dgst - -binary | openssl base64 -A - openssl_dgst_process = subprocess.Popen( + dgst_proc = subprocess.Popen( [openssl_cmd, 'dgst', f'-{algorithm}', '-binary'], - stdin=archive_file, - stdout=subprocess.PIPE + stdin=archive_file, stdout=subprocess.PIPE ) - openssl_base64_process = subprocess.Popen( + base64_proc = subprocess.Popen( [openssl_cmd, 'base64', '-A'], - stdin=openssl_dgst_process.stdout, - stdout=subprocess.PIPE + stdin=dgst_proc.stdout, stdout=subprocess.PIPE ) - - output, _ = openssl_base64_process.communicate() + output, _ = base64_proc.communicate() calculated_hash = output.decode('utf-8').strip() - if hash_digest != calculated_hash: + if base64_digest != calculated_hash: raise InstallException( - f'{package}: The archive hash {calculated_hash} does not match the integrity hash {hash_digest}' + f'{package}: Hash mismatch. Expected={base64_digest}, got={calculated_hash}' ) -def create_lock(lock_file_path): - """ - Creates a lock file. If the file already exists, waits until it is released. - """ - while True: - try: - with open(lock_file_path, 'x'): - logging.info(f"======= Created lock file: {lock_file_path}") - return - except FileExistsError: - wait_for_lock_release(lock_file_path) - -def remove_lock(lock_file_path): - """ - Removes the lock file if it exists. 
- """ - if os.path.exists(lock_file_path): - os.remove(lock_file_path) - logging.info(f"======= Removed lock file: {lock_file_path}") - -def wait_for_lock_release(lock_file_path): - """ - Waits for the specified lock file to be removed, indicating that another process has finished. - """ - logging.info(f"======= Waiting for lock release (file: {lock_file_path})...") - while os.path.exists(lock_file_path): - time.sleep(1) - logging.info("======= Lock released.") - -def load_yaml(file_path): - """ - Loads YAML content from 'file_path'. Returns None if the file does not exist. - Raises InstallException if there's a parsing error. - """ - try: - with open(file_path, 'r') as file: - return yaml.safe_load(file) - except FileNotFoundError: - logging.warning(f"File not found: {file_path}") - return None - except yaml.YAMLError as e: - raise InstallException(f"Error parsing YAML file {file_path}: {e}") - +# ------------------------------------------------------------------------------ +# Função Principal +# ------------------------------------------------------------------------------ def main(): - start_time = datetime.now() + + if len(sys.argv) < 2: + raise InstallException("Usage: python script.py ") + dynamicPluginsRoot = sys.argv[1] + # Configura lock lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') atexit.register(remove_lock, lock_file_path) - signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(0)) + signal.signal(signal.SIGTERM, lambda *a: sys.exit(0)) create_lock(lock_file_path) maxEntrySize = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) @@ -295,194 +301,207 @@ def main(): dynamicPluginsFile = 'dynamic-plugins.yaml' dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') - # test if file dynamic-plugins.yaml exists + # Checa se existe dynamic-plugins.yaml if not os.path.isfile(dynamicPluginsFile): logging.info(f"No {dynamicPluginsFile} file found. 
Skipping dynamic plugins installation.") - with open(dynamicPluginsGlobalConfigFile, 'w') as file: - file.write('') - file.close() - exit(0) + with open(dynamicPluginsGlobalConfigFile, 'w') as f: + f.write('') + sys.exit(0) + # Config global inicial globalConfig = { - 'dynamicPlugins': { + 'dynamicPlugins': { 'rootDirectory': 'dynamic-plugins-root' - } + } } content = load_yaml(dynamicPluginsFile) - - if content == '' or content is None: - logging.info(f"{dynamicPluginsFile} file is empty. Skipping dynamic plugins installation.") - with open(dynamicPluginsGlobalConfigFile, 'w') as file: - file.write('') - file.close() - exit(0) + if not content: + logging.info(f"{dynamicPluginsFile} file is empty or invalid. Skipping dynamic plugins installation.") + with open(dynamicPluginsGlobalConfigFile, 'w') as f: + f.write('') + sys.exit(0) if not isinstance(content, dict): raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") - allPlugins = {} - + # Se SKIP_INTEGRITY_CHECK for true if skipIntegrityCheck: - logging.info(f"SKIP_INTEGRITY_CHECK has been set to {skipIntegrityCheck}, skipping integrity check of packages") + logging.info(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping package integrity checks") + # Processa includes includes = content.get('includes', []) - if not isinstance(includes, list): - raise InstallException(f"content of the \'includes\' field must be a list in {dynamicPluginsFile}") + raise InstallException(f"'includes' field must be a list in {dynamicPluginsFile}") + allPlugins = {} for include in includes: if not isinstance(include, str): - raise InstallException(f"content of the \'includes\' field must be a list of strings in {dynamicPluginsFile}") - + raise InstallException(f"'includes' must be a list of strings in {dynamicPluginsFile}") logging.info('\n======= Including dynamic plugins from %s', include) includeContent = load_yaml(include) - if includeContent is None: - continue + continue # se arquivo não existe ou 
vazio, pula if not isinstance(includeContent, dict): raise InstallException(f"{include} content must be a YAML object") - includePlugins = includeContent.get('plugins', []) - if not isinstance(includePlugins, list): - raise InstallException(f"content of the \'plugins\' field must be a list in {include}") + incPlugins = includeContent.get('plugins', []) + if not isinstance(incPlugins, list): + raise InstallException(f"'plugins' field must be a list in {include}") - for plugin in includePlugins: - allPlugins[plugin['package']] = plugin + for plug in incPlugins: + allPlugins[plug['package']] = plug + # Lê lista de plugins do YAML principal plugins = content.get('plugins', []) - if not isinstance(plugins, list): - raise InstallException(f"content of the \'plugins\' field must be a list in {dynamicPluginsFile}") + raise InstallException(f"'plugins' field must be a list in {dynamicPluginsFile}") + # Sobrescreve configurações de plugins duplicados for plugin in plugins: package = plugin['package'] if not isinstance(package, str): - raise InstallException(f"content of the \'plugins.package\' field must be a string in {dynamicPluginsFile}") + raise InstallException(f"'plugins.package' must be a string in {dynamicPluginsFile}") - # if `package` already exists in `allPlugins`, then override its fields - if package not in allPlugins: + if package in allPlugins: + logging.info('\n======= Overriding dynamic plugin configuration %s', package) + for k, v in plugin.items(): + if k != 'package': + allPlugins[package][k] = v + else: allPlugins[package] = plugin - continue - - # override the included plugins with fields in the main plugins list - logging.info('\n======= Overriding dynamic plugin configuration %s', package) - for key in plugin: - if key == 'package': - continue - allPlugins[package][key] = plugin[key] - # add a hash for each plugin configuration to detect changes + # Calcula hash de cada plugin for plugin in allPlugins.values(): hash_dict = copy.deepcopy(plugin) - # 
remove elements that shouldn't be tracked for installation detection hash_dict.pop('pluginConfig', None) - hash = hashlib.sha256(json.dumps(hash_dict, sort_keys=True).encode('utf-8')).hexdigest() - plugin['hash'] = hash + plugin_hash = hashlib.sha256( + json.dumps(hash_dict, sort_keys=True).encode('utf-8') + ).hexdigest() + plugin['hash'] = plugin_hash - # create a dict of all currently installed plugins in dynamicPluginsRoot + # Identifica plugins já instalados (mapeados por hash) plugin_path_by_hash = {} for dir_name in os.listdir(dynamicPluginsRoot): dir_path = os.path.join(dynamicPluginsRoot, dir_name) if os.path.isdir(dir_path): - hash_file_path = os.path.join(dir_path, 'dynamic-plugin-config.hash') - if os.path.isfile(hash_file_path): - with open(hash_file_path, 'r') as hash_file: - hash_value = hash_file.read().strip() - plugin_path_by_hash[hash_value] = dir_name + h_file = os.path.join(dir_path, 'dynamic-plugin-config.hash') + if os.path.isfile(h_file): + with open(h_file, 'r') as hf: + existing_hash = hf.read().strip() + plugin_path_by_hash[existing_hash] = dir_name + # Prepara downloader OCI oci_downloader = OciDownloader(dynamicPluginsRoot) - # iterate through the list of plugins + # Percorre plugins e instala for plugin in allPlugins.values(): package = plugin['package'] - if 'disabled' in plugin and plugin['disabled'] is True: + if plugin.get('disabled') is True: logging.info('\n======= Skipping disabled dynamic plugin %s', package) continue - # Stores the relative path of the plugin directory once downloaded - plugin_path = '' + pull_policy = plugin.get( + 'pullPolicy', + PullPolicy.ALWAYS if ':latest!' in package else PullPolicy.IF_NOT_PRESENT + ) + if isinstance(pull_policy, str): + pull_policy = PullPolicy(pull_policy) + + plugin_path = '' # caminho relativo instalado if package.startswith('oci://'): - # The OCI downloader + # Instala plugin via OCI try: - pull_policy = plugin.get('pullPolicy', PullPolicy.ALWAYS if ':latest!' 
in package else PullPolicy.IF_NOT_PRESENT) - - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: - logging.info('\n======= Skipping download of already installed dynamic plugin %s', package) + if (plugin['hash'] in plugin_path_by_hash + and pull_policy == PullPolicy.IF_NOT_PRESENT): + # Já instalado e policy = IfNotPresent => skip + logging.info('\n======= Skipping download of installed plugin %s', package) plugin_path_by_hash.pop(plugin['hash']) globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) continue + # Se já instalado e policy = ALWAYS => checar se digest mudou if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: - digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path_by_hash.pop(plugin['hash']), 'dynamic-plugin-image.hash') - local_image_digest = None - if os.path.isfile(digest_file_path): - with open(digest_file_path, 'r') as digest_file: - digest_value = digest_file.read().strip() - local_image_digest = digest_value - remote_image_digest = oci_downloader.digest(package) - if remote_image_digest == local_image_digest: - logging.info('\n======= Skipping download of already installed dynamic plugin %s', package) + old_dir = plugin_path_by_hash.pop(plugin['hash']) + old_digest_file = os.path.join(dynamicPluginsRoot, old_dir, 'dynamic-plugin-image.hash') + local_digest = None + if os.path.isfile(old_digest_file): + with open(old_digest_file, 'r') as df: + local_digest = df.read().strip() + remote_digest = oci_downloader.digest(package) + if remote_digest == local_digest: + logging.info('\n======= Skipping download of installed plugin (same digest) %s', package) globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) continue else: logging.info('\n======= Installing dynamic plugin %s', package) - else: logging.info('\n======= Installing dynamic plugin %s', package) + # De fato faz o download plugin_path = oci_downloader.download(package) - 
digest_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') - with open(digest_file_path, 'w') as digest_file: - digest_file.write(oci_downloader.digest(package)) - # remove any duplicate hashes which can occur when only the version is updated - for key in [k for k, v in plugin_path_by_hash.items() if v == plugin_path]: - plugin_path_by_hash.pop(key) + + # Salva o digest remoto + digest_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') + with open(digest_path, 'w') as df: + df.write(oci_downloader.digest(package)) + + # Remove duplicatas do plugin_path_by_hash que apontem p/ mesmo plugin_path + duplicates = [k for k, v in plugin_path_by_hash.items() if v == plugin_path] + for dup in duplicates: + plugin_path_by_hash.pop(dup) except Exception as e: raise InstallException(f"Error while adding OCI plugin {package} to downloader: {e}") + else: - # The NPM downloader + # Instala plugin via NPM plugin_already_installed = False - pull_policy = plugin.get('pullPolicy', PullPolicy.IF_NOT_PRESENT) if plugin['hash'] in plugin_path_by_hash: - force_download = plugin.get('forceDownload', False) - if pull_policy == PullPolicy.ALWAYS or force_download: - logging.info('\n======= Forcing download of already installed dynamic plugin %s', package) + force_dl = plugin.get('forceDownload', False) + if pull_policy == PullPolicy.ALWAYS or force_dl: + logging.info('\n======= Forcing download of installed dynamic plugin %s', package) else: - logging.info('\n======= Skipping download of already installed dynamic plugin %s', package) + logging.info('\n======= Skipping download of installed dynamic plugin %s', package) plugin_already_installed = True - # remove the hash from plugin_path_by_hash so that we can detect plugins that have been removed plugin_path_by_hash.pop(plugin['hash']) else: logging.info('\n======= Installing dynamic plugin %s', package) if plugin_already_installed: + # apenas faz merge de config, se houver 
globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) continue + # Verifica se local => se for, pula check de integridade package_is_local = package.startswith('./') - - # If package is not local, then integrity check is mandatory - if not package_is_local and not skipIntegrityCheck and not 'integrity' in plugin: + if (not package_is_local) and (not skipIntegrityCheck) and ('integrity' not in plugin): raise InstallException(f"No integrity hash provided for Package {package}") if package_is_local: package = os.path.join(os.getcwd(), package[2:]) logging.info('\t==> Grabbing package archive through `npm pack`') - completed = subprocess.run(['npm', 'pack', package], capture_output=True, cwd=dynamicPluginsRoot, text=True) + completed = subprocess.run( + ['npm', 'pack', package], + capture_output=True, cwd=dynamicPluginsRoot, text=True + ) if completed.returncode != 0: - raise InstallException(f'Error while installing plugin { package } with \'npm pack\' : ' + completed.stderr) + raise InstallException( + f"Error while installing plugin {package} with 'npm pack': {completed.stderr}" + ) archive = os.path.join(dynamicPluginsRoot, completed.stdout.strip()) - if not (package_is_local or skipIntegrityCheck): + # Verifica integridade se aplicável + if not package_is_local and not skipIntegrityCheck: logging.info('\t==> Verifying package integrity') - verify_package_integrity(plugin, archive, dynamicPluginsRoot, oci_downloader.cat_cmd, oci_downloader.openssl_cmd) + verify_package_integrity( + plugin, archive, dynamicPluginsRoot, oci_downloader.openssl_cmd + ) directory = archive.replace('.tgz', '') directoryRealpath = os.path.realpath(directory) @@ -494,73 +513,82 @@ def main(): os.mkdir(directory) logging.info('\t==> Extracting package archive %s', archive) - with tarfile.open(archive, 'r:gz') as file: - # extract the archive content but take care of zip bombs - for member in file.getmembers(): + with tarfile.open(archive, 'r:gz') as f: + for member in 
f.getmembers(): if member.isreg(): if not member.name.startswith('package/'): - raise InstallException("NPM package archive archive does not start with 'package/' as it should: " + member.name) - + raise InstallException( + "NPM package archive doesn't start with 'package/': " + member.name + ) if member.size > maxEntrySize: raise InstallException('Zip bomb detected in ' + member.name) member.name = member.name.removeprefix('package/') - file.extract(member, path=directory, filter='tar') + f.extract(member, path=directory, filter='tar') elif member.isdir(): logging.info('\t\tSkipping directory entry %s', member.name) elif member.islnk() or member.issym(): if not member.linkpath.startswith('package/'): - raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) + raise InstallException( + f'NPM package archive link outside of archive: {member.name} -> {member.linkpath}' + ) member.name = member.name.removeprefix('package/') member.linkpath = member.linkpath.removeprefix('package/') - realpath = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) + realpath = os.path.realpath( + os.path.join(directory, *os.path.split(member.linkname)) + ) if not realpath.startswith(directoryRealpath): - raise InstallException('NPM package archive contains a link outside of the archive: ' + member.name + ' -> ' + member.linkpath) - - file.extract(member, path=directory, filter='tar') + raise InstallException( + f'NPM package archive link outside of the archive: {member.name} -> {member.linkpath}' + ) + f.extract(member, path=directory, filter='tar') else: + # Se for CHRTYPE, BLKTYPE, FIFOTYPE ou outro + t_str = 'unknown' if member.type == tarfile.CHRTYPE: - type_str = "character device" + t_str = 'character device' elif member.type == tarfile.BLKTYPE: - type_str = "block device" + t_str = 'block device' elif member.type == tarfile.FIFOTYPE: - type_str = "FIFO" - else: - type_str = "unknown" - 
- raise InstallException('NPM package archive contains a non regular file: ' + member.name + ' - ' + type_str) + t_str = 'FIFO' + raise InstallException( + f'NPM package archive contains a non-regular file: {member.name} - {t_str}' + ) logging.info('\t==> Removing package archive %s', archive) os.remove(archive) - # create a hash file in the plugin directory - hash = plugin['hash'] - hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') - with open(hash_file_path, 'w') as digest_file: - digest_file.write(hash) + # Cria arquivo de hash no plugin + plugin_hash_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') + with open(plugin_hash_path, 'w') as df: + df.write(plugin['hash']) + # Se não há pluginConfig, já finaliza if 'pluginConfig' not in plugin: - logging.info('\t==> Successfully installed dynamic plugin %s', package) - continue + logging.info('\t==> Successfully installed dynamic plugin %s', package) + continue - # if some plugin configuration is defined, merge it with the global configuration + # Faz merge de config do plugin globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) logging.info('\t==> Successfully installed dynamic plugin %s', package) + # Salva config global no final yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) - # remove plugins that have been removed from the configuration - for hash_value in plugin_path_by_hash: - plugin_directory = os.path.join(dynamicPluginsRoot, plugin_path_by_hash[hash_value]) - logging.info('\n======= Removing previously installed dynamic plugin %s', plugin_path_by_hash[hash_value]) + # Remove plugins que ficaram sem hash no path + for old_hash in plugin_path_by_hash: + old_plugin_dir = plugin_path_by_hash[old_hash] + plugin_directory = os.path.join(dynamicPluginsRoot, old_plugin_dir) + logging.info('\n======= Removing previously installed dynamic plugin %s', old_plugin_dir) shutil.rmtree(plugin_directory, 
ignore_errors=True, onerror=None) + # Exibe tempo total end_time = datetime.now() elapsed_time = end_time - start_time - print(f"Total Execution Timee: {elapsed_time}") + print(f"Total Execution Timeee: {elapsed_time}") if __name__ == "__main__": main() From 4edd18bb13610893e3ea8bfee903e1790cad7a27 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 15:49:43 -0300 Subject: [PATCH 11/18] test --- docker/install-dynamic-plugins.py | 373 ++++++++++++++++++++++-------- 1 file changed, 275 insertions(+), 98 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index a9d6890d54..d15357e29f 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -1,47 +1,154 @@ +#!/usr/bin/env python3 +""" +Script para instalar plugins dinâmicos a partir de imagens OCI ou pacotes NPM. +Otimizado para contêineres Kubernetes com foco em confiabilidade e diagnóstico. +""" + +import base64 +import binascii import copy -from enum import StrEnum +from datetime import datetime import hashlib import json +import logging import os -import sys -import tempfile -import yaml -import tarfile import shutil +import signal import subprocess -import base64 -import binascii -import atexit +import sys +import tarfile +import tempfile import time -import signal -import logging -from datetime import datetime +import yaml +from enum import StrEnum +from functools import lru_cache +from pathlib import Path +from typing import Dict, List, Any, Optional, Set, Tuple, Union + +# ------------------------------------------------------------------------------ +# Configuração de Logging +# ------------------------------------------------------------------------------ + +class OptimizedLogger: + """Sistema de logging otimizado para ambientes containerizados.""" + + def __init__(self, log_dir=None): + """Inicializa o logger com opções para arquivo de log.""" + self.logger = logging.getLogger('dynamic-plugins') + 
self.logger.setLevel(logging.INFO) + self.log_file = None + + # Remover handlers existentes para evitar duplicação + for handler in self.logger.handlers[:]: + self.logger.removeHandler(handler) + + # Formatador para mensagens de log + formatter = logging.Formatter( + '%(asctime)s - %(levelname)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + + # Handler para console sempre presente + console_handler = logging.StreamHandler() + console_handler.setFormatter(formatter) + self.logger.addHandler(console_handler) + + # Opcionalmente adicionar handler de arquivo + if log_dir: + try: + log_path = Path(log_dir) + log_path.mkdir(parents=True, exist_ok=True) + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + self.log_file = log_path / f"plugin_install_{timestamp}.log" + + file_handler = logging.FileHandler(str(self.log_file)) + file_handler.setFormatter(formatter) + self.logger.addHandler(file_handler) + + self.info(f"Log file created at: {self.log_file}") + except Exception as e: + # Falha silenciosamente mas continua com logging no console + self.logger.warning(f"Could not set up file logging: {e}") + + def info(self, msg, *args, **kwargs): + """Log com nível INFO.""" + self.logger.info(msg, *args, **kwargs) + + def warning(self, msg, *args, **kwargs): + """Log com nível WARNING.""" + self.logger.warning(msg, *args, **kwargs) + + def error(self, msg, *args, **kwargs): + """Log com nível ERROR.""" + self.logger.error(msg, *args, **kwargs) + + def debug(self, msg, *args, **kwargs): + """Log com nível DEBUG.""" + self.logger.debug(msg, *args, **kwargs) + + def critical(self, msg, *args, **kwargs): + """Log com nível CRITICAL.""" + self.logger.critical(msg, *args, **kwargs) + + def log_system_info(self): + """Registra informações do sistema para diagnóstico.""" + self.info("-" * 50) + self.info("System Information:") + self.info(f" Hostname: {os.environ.get('HOSTNAME', 'unknown')}") + self.info(f" Time: {datetime.now().isoformat()}") + + # Informações específicas do 
Kubernetes se disponíveis + if os.path.exists('/var/run/secrets/kubernetes.io/serviceaccount'): + try: + with open('/var/run/secrets/kubernetes.io/serviceaccount/namespace', 'r') as f: + namespace = f.read().strip() + self.info(f" Kubernetes namespace: {namespace}") + except Exception: + pass -# Configuração básica de logging -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') + # Informações de sistema + try: + import platform + self.info(f" Python version: {platform.python_version()}") + self.info(f" Platform: {platform.platform()}") + except ImportError: + pass + + self.info("-" * 50) + + def log_execution_result(self, success=True, error=None): + """Registra o resultado da execução do script.""" + if success: + self.info("Plugin installation completed successfully!") + else: + self.error(f"Plugin installation failed: {error}") + + if self.log_file: + self.info(f"Full logs available at: {self.log_file}") + +# Inicializa logger global para uso durante importação do módulo +logger = OptimizedLogger() # ------------------------------------------------------------------------------ # Definições de Classes e Constantes # ------------------------------------------------------------------------------ + class PullPolicy(StrEnum): IF_NOT_PRESENT = 'IfNotPresent' ALWAYS = 'Always' - # NEVER = 'Never' not needed class InstallException(Exception): """Exceção base para erros neste script.""" pass -RECOGNIZED_ALGORITHMS = ( - 'sha512', - 'sha384', - 'sha256', -) +# Algoritmos de hash suportados para verificação de integridade +RECOGNIZED_ALGORITHMS = frozenset(['sha512', 'sha384', 'sha256']) # ------------------------------------------------------------------------------ # Funções Auxiliares # ------------------------------------------------------------------------------ -def merge(source, destination, prefix=''): + +def merge(source: Dict[str, Any], destination: Dict[str, Any], prefix: str = '') -> Dict[str, Any]: """ Faz merge 
recursivo do dicionário 'source' em 'destination'. Se encontrar chave com valor conflitante, lança InstallException. @@ -58,125 +165,184 @@ def merge(source, destination, prefix=''): destination[key] = value return destination -def maybeMergeConfig(config, globalConfig): +def maybe_merge_config(config: Optional[Dict[str, Any]], global_config: Dict[str, Any]) -> Dict[str, Any]: """ Se 'config' for dict, faz merge no 'globalConfig'; caso contrário, retorna 'globalConfig' inalterado. """ if config is not None and isinstance(config, dict): - logging.info('\t==> Merging plugin-specific configuration') - return merge(config, globalConfig) - return globalConfig + logger.info('\t==> Merging plugin-specific configuration') + return merge(config, global_config) + return global_config + +def check_prerequisites() -> Tuple[str, ...]: + """ + Verifica e retorna o caminho de ferramentas necessárias. + Lança InstallException se alguma não estiver disponível. + """ + required_tools = { + 'skopeo': "Skopeo is required for OCI image handling", + 'npm': "NPM is required for NPM package handling" + } + missing = [] + found_tools = {} + + for tool, description in required_tools.items(): + path = shutil.which(tool) + if path: + found_tools[tool] = path + else: + missing.append(f"- {tool}: {description}") + + if missing: + raise InstallException(f"Required tools not found in PATH:\n" + "\n".join(missing)) + + return found_tools # ------------------------------------------------------------------------------ # Funções de Lock # ------------------------------------------------------------------------------ -def create_lock(lock_file_path): + +def create_lock(lock_file_path: Union[str, Path]): """ Cria arquivo de lock. Se já existir, aguarda até ele ser liberado. 
""" + lock_path = Path(lock_file_path) while True: try: - with open(lock_file_path, 'x'): - logging.info(f"======= Created lock file: {lock_file_path}") - return + lock_path.touch(exist_ok=False) + logger.info(f"======= Created lock file: {lock_path}") + return except FileExistsError: - wait_for_lock_release(lock_file_path) + wait_for_lock_release(lock_path) -def remove_lock(lock_file_path): +def remove_lock(lock_file_path: Union[str, Path]): """ Remove o lock file, se existir. """ - if os.path.exists(lock_file_path): - os.remove(lock_file_path) - logging.info(f"======= Removed lock file: {lock_file_path}") + lock_path = Path(lock_file_path) + if lock_path.exists(): + try: + lock_path.unlink() + logger.info(f"======= Removed lock file: {lock_path}") + except OSError as e: + logger.warning(f"======= Failed to remove lock file: {e}") -def wait_for_lock_release(lock_file_path): +def wait_for_lock_release(lock_file_path: Union[str, Path]): """ - Fica em loop até o arquivo de lock ser removido, indicando que outro processo concluiu. + Fica em loop até o arquivo de lock ser removido, com detecção de locks obsoletos. 
""" - logging.info(f"======= Waiting for lock release (file: {lock_file_path})...") - while os.path.exists(lock_file_path): + lock_path = Path(lock_file_path) + logger.info(f"======= Waiting for lock release (file: {lock_path})...") + + start_time = time.time() + timeout = 300 # 5 minutos + + while lock_path.exists(): time.sleep(1) - logging.info("======= Lock released.") + + # Verificar timeout + if time.time() - start_time > timeout: + logger.warning(f"Lock wait timeout after {timeout}s - may be stale, removing") + remove_lock(lock_path) + break + + logger.info("======= Lock released.") # ------------------------------------------------------------------------------ -# Função para carregar YAML +# Funções para carregamento de arquivos # ------------------------------------------------------------------------------ -def load_yaml(file_path): + +def load_yaml(file_path: Union[str, Path]) -> Optional[Any]: """ Carrega o conteúdo YAML de 'file_path'. Retorna None se o arquivo não existir. Lança InstallException em caso de erros de parsing. 
""" - if not os.path.isfile(file_path): - logging.warning(f"File not found: {file_path}") + path = Path(file_path) + if not path.is_file(): + logger.warning(f"File not found: {path}") return None + try: - with open(file_path, 'r') as file: + with path.open('r') as file: return yaml.safe_load(file) except yaml.YAMLError as e: - raise InstallException(f"Error parsing YAML file {file_path}: {e}") + raise InstallException(f"Error parsing YAML file {path}: {e}") # ------------------------------------------------------------------------------ -# Classe para lidar com download via OCI (skopeo) +# OCI Downloader com otimizações # ------------------------------------------------------------------------------ + class OciDownloader: - def __init__(self, destination: str): - self._skopeo = shutil.which('skopeo') - if self._skopeo is None: + def __init__(self, destination: Union[str, Path], tools: Dict[str, str]): + """ + Inicializa o OciDownloader com ferramentas validadas. + + Args: + destination: Diretório onde os plugins serão extraídos + tools: Dicionário com caminhos para ferramentas necessárias + """ + self._skopeo = tools.get('skopeo') + if not self._skopeo: raise InstallException('skopeo executable not found in PATH') self.tmp_dir_obj = tempfile.TemporaryDirectory() - self.tmp_dir = self.tmp_dir_obj.name - self.image_to_tarball = {} - self.destination = destination - - self.openssl_cmd = shutil.which('openssl') - if not self.openssl_cmd: - raise InstallException("Required utility 'openssl' not found in PATH.") + self.tmp_dir = Path(self.tmp_dir_obj.name) + self.image_to_tarball: Dict[str, Path] = {} + self.destination = Path(destination) + self._digest_cache: Dict[str, str] = {} - def skopeo(self, command): + def skopeo(self, command: List[str]) -> str: """ - Executa 'skopeo' com os argumentos especificados e retorna stdout como string. + Executa 'skopeo' com os argumentos especificados e retorna stdout. 
""" - rv = subprocess.run([self._skopeo] + command, check=True, capture_output=True, text=True) - if rv.returncode != 0: - raise InstallException(f'Error while running skopeo command: {rv.stderr}') - return rv.stdout + try: + result = subprocess.run( + [self._skopeo] + command, + check=True, + capture_output=True, + text=True + ) + return result.stdout + except subprocess.CalledProcessError as e: + error_msg = f'Error while running skopeo command: {e.stderr}' + logger.error(error_msg) + raise InstallException(error_msg) - def get_plugin_tar(self, image: str) -> str: + def get_plugin_tar(self, image: str) -> Path: """ Faz o download da imagem (se ainda não feito), usando skopeo, e retorna o caminho local ao tar. """ - if image not in self.image_to_tarball: - logging.info(f'\t==> Copying image {image} to local filesystem') - image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() + if image in self.image_to_tarball: + return self.image_to_tarball[image] - local_dir = os.path.join(self.tmp_dir, image_digest) - image_url = image.replace('oci://', 'docker://') - self.skopeo(['copy', image_url, f'dir:{local_dir}']) + logger.info(f'\t==> Copying image {image} to local filesystem') + image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() - manifest_path = os.path.join(local_dir, 'manifest.json') - with open(manifest_path, 'r') as f: - manifest = json.load(f) + local_dir = self.tmp_dir / image_digest + image_url = image.replace('oci://', 'docker://') + self.skopeo(['copy', image_url, f'dir:{local_dir}']) - layer_digest = manifest['layers'][0]['digest'] - _, filename = layer_digest.split(':') - local_path = os.path.join(local_dir, filename) - self.image_to_tarball[image] = local_path + manifest_path = local_dir / 'manifest.json' + with manifest_path.open('r') as f: + manifest = json.load(f) - return self.image_to_tarball[image] + layer_digest = manifest['layers'][0]['digest'] + _, filename = 
layer_digest.split(':') + local_path = local_dir / filename + self.image_to_tarball[image] = local_path + return local_path - def extract_plugin(self, tar_file: str, plugin_path: str) -> None: + def extract_plugin(self, tar_file: Path, plugin_path: str) -> None: """ - Extrai apenas arquivos que começam com 'plugin_path' do tar.gz, verificando tamanho (anti zip-bomb) - e possíveis links fora do escopo. + Extrai apenas arquivos que começam com 'plugin_path' do tar.gz, + verificando tamanho (anti zip-bomb) e possíveis links fora do escopo. """ - extracted_path = os.path.abspath(self.destination) + extracted_path = self.destination.absolute() max_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) with tarfile.open(tar_file, 'r:gz') as tar: - members = [] + members_to_extract = [] for member in tar.getmembers(): if not member.name.startswith(plugin_path): continue @@ -185,18 +351,21 @@ def extract_plugin(self, tar_file: str, plugin_path: str) -> None: raise InstallException(f'Zip bomb detected in {member.name}') if member.islnk() or member.issym(): - realpath = os.path.realpath( - os.path.join(extracted_path, plugin_path, *os.path.split(member.linkname)) - ) - if not realpath.startswith(extracted_path): - logging.warning( + realpath = Path(extracted_path / plugin_path).joinpath( + *Path(member.linkname).parts).resolve() + + if not str(realpath).startswith(str(extracted_path)): + logger.warning( f'\t==> WARNING: skipping file containing link outside of the archive: ' f'{member.name} -> {member.linkpath}' ) continue - members.append(member) - tar.extractall(extracted_path, members=members, filter='tar') + members_to_extract.append(member) + + # Extração em batch é mais eficiente + if members_to_extract: + tar.extractall(extracted_path, members=members_to_extract, filter='tar') def download(self, package: str) -> str: """ @@ -206,24 +375,32 @@ def download(self, package: str) -> str: image, plugin_path = package.split('!') tar_file = self.get_plugin_tar(image) - 
plugin_directory = os.path.join(self.destination, plugin_path) - if os.path.exists(plugin_directory): - logging.info(f'\t==> Removing previous plugin directory {plugin_directory}') - shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + plugin_directory = self.destination / plugin_path + if plugin_directory.exists(): + logger.info(f'\t==> Removing previous plugin directory {plugin_directory}') + try: + shutil.rmtree(plugin_directory) + except OSError as e: + logger.warning(f'\t==> Could not remove directory: {e}, trying alternative method') + # Tentar removê-lo forçadamente se o método normal falhar + try: + os.system(f'rm -rf "{plugin_directory}"') + except Exception as e2: + raise InstallException(f"Failed to remove directory {plugin_directory}: {e2}") self.extract_plugin(tar_file=tar_file, plugin_path=plugin_path) return plugin_path def digest(self, package: str) -> str: """ - Retorna o digest da imagem OCI usando 'skopeo inspect'. + Retorna o digest da imagem OCI usando 'skopeo inspect' com cache. """ image, _ = package.split('!') - image_url = image.replace('oci://', 'docker://') - output = self.skopeo(['inspect', image_url]) - data = json.loads(output) - # Ex.: 'Digest': 'sha256:3a033c...' 
- return data['Digest'].split(':')[1] + + # Verificar cache primeiro + if image in self._digest_cache: + return self._digest_cache[image] + # ------------------------------------------------------------------------------ # Verificação de Integridade From 1c0f321b636d4870d510f9906023e6bd531fe48d Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 15:51:39 -0300 Subject: [PATCH 12/18] test --- docker/install-dynamic-plugins.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index d15357e29f..c5fc21f452 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -765,7 +765,7 @@ def main(): # Exibe tempo total end_time = datetime.now() elapsed_time = end_time - start_time - print(f"Total Execution Timeee: {elapsed_time}") + print(f"Total Execution Timeeeee: {elapsed_time}") if __name__ == "__main__": main() From 0fdacdcc7187d49cb704cf0366d7f03450eca17b Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 17:33:33 -0300 Subject: [PATCH 13/18] test --- docker/install-dynamic-plugins.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index c5fc21f452..397890d4aa 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -24,6 +24,7 @@ from functools import lru_cache from pathlib import Path from typing import Dict, List, Any, Optional, Set, Tuple, Union +import atexit # ------------------------------------------------------------------------------ # Configuração de Logging From d12fc64adb93187cfacb370fd00f2661af7bfa8b Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 18:55:27 -0300 Subject: [PATCH 14/18] test --- docker/install-dynamic-plugins.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 
397890d4aa..f4961f117f 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -571,8 +571,9 @@ def main(): existing_hash = hf.read().strip() plugin_path_by_hash[existing_hash] = dir_name + tools = check_prerequisites() # Prepara downloader OCI - oci_downloader = OciDownloader(dynamicPluginsRoot) + oci_downloader = OciDownloader(dynamicPluginsRoot, tools) # Percorre plugins e instala for plugin in allPlugins.values(): From 6c7131ab1c09891d37df3475fa12117f731e35e2 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 19:39:56 -0300 Subject: [PATCH 15/18] test --- docker/install-dynamic-plugins.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index f4961f117f..4324e0d587 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -599,7 +599,7 @@ def main(): # Já instalado e policy = IfNotPresent => skip logging.info('\n======= Skipping download of installed plugin %s', package) plugin_path_by_hash.pop(plugin['hash']) - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) continue # Se já instalado e policy = ALWAYS => checar se digest mudou @@ -613,7 +613,7 @@ def main(): remote_digest = oci_downloader.digest(package) if remote_digest == local_digest: logging.info('\n======= Skipping download of installed plugin (same digest) %s', package) - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) continue else: logging.info('\n======= Installing dynamic plugin %s', package) @@ -652,7 +652,7 @@ def main(): if plugin_already_installed: # apenas faz merge de config, se houver - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) + globalConfig = 
maybe_merge_config(plugin.get('pluginConfig'), globalConfig) continue # Verifica se local => se for, pula check de integridade @@ -750,8 +750,7 @@ def main(): continue # Faz merge de config do plugin - globalConfig = maybeMergeConfig(plugin.get('pluginConfig'), globalConfig) - + globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) logging.info('\t==> Successfully installed dynamic plugin %s', package) # Salva config global no final From 3fc0b787ed40ecf4bb88f4f5309cb00d8898c2ec Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Mon, 17 Mar 2025 20:37:44 -0300 Subject: [PATCH 16/18] test --- docker/install-dynamic-plugins.py | 77 ++++++++++++++++++++----------- 1 file changed, 51 insertions(+), 26 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 4324e0d587..2eeeb6befd 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -406,11 +406,23 @@ def digest(self, package: str) -> str: # ------------------------------------------------------------------------------ # Verificação de Integridade # ------------------------------------------------------------------------------ -def verify_package_integrity(plugin: dict, archive: str, working_directory: str, openssl_cmd: str) -> None: +def verify_package_integrity(plugin: dict, archive: Union[str, Path], logger=None) -> None: """ - Verifica integridade do arquivo 'archive' usando plugin['integrity'] no formato -. + Verifica a integridade do pacote usando algoritmos de hash nativos do Python. 
+ + Args: + plugin: Dicionário contendo informações do plugin, incluindo a chave 'integrity' + archive: Caminho para o arquivo a ser verificado + logger: Objeto logger para logging (opcional) + + Raises: + InstallException: Se a verificação de integridade falhar ou parâmetros inválidos """ + # Use o logger global se nenhum for fornecido + log = logger or globals().get('logger', logging) package = plugin['package'] + + # Verificar se a chave 'integrity' existe integrity = plugin.get('integrity') if not integrity: raise InstallException(f'Package integrity for {package} is missing') @@ -418,43 +430,58 @@ def verify_package_integrity(plugin: dict, archive: str, working_directory: str, if not isinstance(integrity, str): raise InstallException(f'Package integrity for {package} must be a string') + # Analisar a string de integridade parts = integrity.split('-') if len(parts) != 2: raise InstallException( f'Package integrity for {package} must be -' ) - algorithm, base64_digest = parts + algorithm, hash_digest = parts + + # Verificar se o algoritmo é suportado if algorithm not in RECOGNIZED_ALGORITHMS: raise InstallException( - f'{package}: Provided Package integrity algorithm {algorithm} is not supported. ' - f'Use one of {RECOGNIZED_ALGORITHMS}.' + f'{package}: Algorithm {algorithm} not supported. 
Use one of: {", ".join(RECOGNIZED_ALGORITHMS)}' ) + # Verificar se o hash é base64 válido try: - base64.b64decode(base64_digest, validate=True) + base64.b64decode(hash_digest, validate=True) except binascii.Error: raise InstallException( - f'{package}: Provided Package integrity hash {base64_digest} is not valid base64' + f'{package}: Provided Package integrity hash {hash_digest} is not valid base64' ) - # Lê o arquivo em Python (sem usar 'cat') e passa ao openssl - with open(archive, 'rb') as archive_file: - dgst_proc = subprocess.Popen( - [openssl_cmd, 'dgst', f'-{algorithm}', '-binary'], - stdin=archive_file, stdout=subprocess.PIPE - ) - base64_proc = subprocess.Popen( - [openssl_cmd, 'base64', '-A'], - stdin=dgst_proc.stdout, stdout=subprocess.PIPE - ) - output, _ = base64_proc.communicate() - calculated_hash = output.decode('utf-8').strip() + # Mapear algoritmos para funções do hashlib + hash_algorithms = { + 'sha256': hashlib.sha256, + 'sha384': hashlib.sha384, + 'sha512': hashlib.sha512 + } - if base64_digest != calculated_hash: - raise InstallException( - f'{package}: Hash mismatch. 
Expected={base64_digest}, got={calculated_hash}' - ) + # Calcular hash do arquivo + log.info(f'\t==> Verifying {algorithm} integrity of {Path(archive).name}') + + try: + hasher = hash_algorithms[algorithm]() + with open(archive, 'rb') as f: + # Ler em chunks para evitar carregar todo o arquivo na memória + for chunk in iter(lambda: f.read(65536), b''): + hasher.update(chunk) + + calculated = base64.b64encode(hasher.digest()).decode('utf-8') + + if hash_digest != calculated: + raise InstallException( + f'{package}: Hash mismatch.\n' + f'Expected: {hash_digest}\n' + f'Got: {calculated}' + ) + + log.info(f'\t==> Integrity verification passed') + except IOError as e: + raise InstallException(f"Failed to read file {archive}: {e}") # ------------------------------------------------------------------------------ # Função Principal @@ -678,9 +705,7 @@ def main(): # Verifica integridade se aplicável if not package_is_local and not skipIntegrityCheck: logging.info('\t==> Verifying package integrity') - verify_package_integrity( - plugin, archive, dynamicPluginsRoot, oci_downloader.openssl_cmd - ) + verify_package_integrity(plugin, archive) directory = archive.replace('.tgz', '') directoryRealpath = os.path.realpath(directory) From 48352bc3364503f56bfb33329a5fc3194970da32 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Tue, 18 Mar 2025 09:06:37 -0300 Subject: [PATCH 17/18] test --- docker/install-dynamic-plugins.py | 157 +++++++++++++----------------- 1 file changed, 65 insertions(+), 92 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 2eeeb6befd..130eb65a96 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -494,7 +494,6 @@ def main(): dynamicPluginsRoot = sys.argv[1] - # Configura lock lock_file_path = os.path.join(dynamicPluginsRoot, 'install-dynamic-plugins.lock') atexit.register(remove_lock, lock_file_path) signal.signal(signal.SIGTERM, lambda *a: sys.exit(0)) @@ -506,14 +505,12 
@@ def main(): dynamicPluginsFile = 'dynamic-plugins.yaml' dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') - # Checa se existe dynamic-plugins.yaml if not os.path.isfile(dynamicPluginsFile): logging.info(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") with open(dynamicPluginsGlobalConfigFile, 'w') as f: f.write('') sys.exit(0) - # Config global inicial globalConfig = { 'dynamicPlugins': { 'rootDirectory': 'dynamic-plugins-root' @@ -522,22 +519,20 @@ def main(): content = load_yaml(dynamicPluginsFile) if not content: - logging.info(f"{dynamicPluginsFile} file is empty or invalid. Skipping dynamic plugins installation.") + logging.info(f"{dynamicPluginsFile} is empty or invalid. Skipping installation.") with open(dynamicPluginsGlobalConfigFile, 'w') as f: f.write('') sys.exit(0) if not isinstance(content, dict): - raise InstallException(f"{dynamicPluginsFile} content must be a YAML object") + raise InstallException(f"{dynamicPluginsFile} must be a YAML object") - # Se SKIP_INTEGRITY_CHECK for true if skipIntegrityCheck: - logging.info(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping package integrity checks") + logging.info(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping integrity checks") - # Processa includes includes = content.get('includes', []) if not isinstance(includes, list): - raise InstallException(f"'includes' field must be a list in {dynamicPluginsFile}") + raise InstallException(f"'includes' must be a list in {dynamicPluginsFile}") allPlugins = {} for include in includes: @@ -546,30 +541,28 @@ def main(): logging.info('\n======= Including dynamic plugins from %s', include) includeContent = load_yaml(include) - if includeContent is None: - continue # se arquivo não existe ou vazio, pula + if not includeContent: + continue if not isinstance(includeContent, dict): - raise InstallException(f"{include} content must be a YAML object") + raise 
InstallException(f"{include} must be a YAML object") - incPlugins = includeContent.get('plugins', []) - if not isinstance(incPlugins, list): - raise InstallException(f"'plugins' field must be a list in {include}") + incPlugs = includeContent.get('plugins', []) + if not isinstance(incPlugs, list): + raise InstallException(f"'plugins' must be a list in {include}") - for plug in incPlugins: - allPlugins[plug['package']] = plug + for p in incPlugs: + allPlugins[p['package']] = p - # Lê lista de plugins do YAML principal + # Plugins do arquivo principal plugins = content.get('plugins', []) if not isinstance(plugins, list): - raise InstallException(f"'plugins' field must be a list in {dynamicPluginsFile}") + raise InstallException(f"'plugins' must be a list in {dynamicPluginsFile}") - # Sobrescreve configurações de plugins duplicados for plugin in plugins: package = plugin['package'] if not isinstance(package, str): - raise InstallException(f"'plugins.package' must be a string in {dynamicPluginsFile}") - + raise InstallException(f"'plugins.package' must be a string") if package in allPlugins: logging.info('\n======= Overriding dynamic plugin configuration %s', package) for k, v in plugin.items(): @@ -578,35 +571,32 @@ def main(): else: allPlugins[package] = plugin - # Calcula hash de cada plugin + # Calcula hash for plugin in allPlugins.values(): hash_dict = copy.deepcopy(plugin) hash_dict.pop('pluginConfig', None) - plugin_hash = hashlib.sha256( - json.dumps(hash_dict, sort_keys=True).encode('utf-8') - ).hexdigest() - plugin['hash'] = plugin_hash + h = hashlib.sha256(json.dumps(hash_dict, sort_keys=True).encode('utf-8')).hexdigest() + plugin['hash'] = h - # Identifica plugins já instalados (mapeados por hash) + # Lê diretorios instalados plugin_path_by_hash = {} for dir_name in os.listdir(dynamicPluginsRoot): dir_path = os.path.join(dynamicPluginsRoot, dir_name) if os.path.isdir(dir_path): - h_file = os.path.join(dir_path, 'dynamic-plugin-config.hash') - if 
os.path.isfile(h_file): - with open(h_file, 'r') as hf: - existing_hash = hf.read().strip() - plugin_path_by_hash[existing_hash] = dir_name + hash_file = os.path.join(dir_path, 'dynamic-plugin-config.hash') + if os.path.isfile(hash_file): + with open(hash_file, 'r') as hf: + old_hash = hf.read().strip() + plugin_path_by_hash[old_hash] = dir_name tools = check_prerequisites() - # Prepara downloader OCI oci_downloader = OciDownloader(dynamicPluginsRoot, tools) - # Percorre plugins e instala + # Instala cada plugin for plugin in allPlugins.values(): package = plugin['package'] - if plugin.get('disabled') is True: + if plugin.get('disabled'): logging.info('\n======= Skipping disabled dynamic plugin %s', package) continue @@ -617,19 +607,15 @@ def main(): if isinstance(pull_policy, str): pull_policy = PullPolicy(pull_policy) - plugin_path = '' # caminho relativo instalado if package.startswith('oci://'): - # Instala plugin via OCI + # Instala via OCI try: - if (plugin['hash'] in plugin_path_by_hash - and pull_policy == PullPolicy.IF_NOT_PRESENT): - # Já instalado e policy = IfNotPresent => skip + if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: logging.info('\n======= Skipping download of installed plugin %s', package) plugin_path_by_hash.pop(plugin['hash']) globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) continue - # Se já instalado e policy = ALWAYS => checar se digest mudou if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: old_dir = plugin_path_by_hash.pop(plugin['hash']) old_digest_file = os.path.join(dynamicPluginsRoot, old_dir, 'dynamic-plugin-image.hash') @@ -639,7 +625,7 @@ def main(): local_digest = df.read().strip() remote_digest = oci_downloader.digest(package) if remote_digest == local_digest: - logging.info('\n======= Skipping download of installed plugin (same digest) %s', package) + logging.info('\n======= Skipping download (same digest) %s', package) 
globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) continue else: @@ -647,45 +633,39 @@ def main(): else: logging.info('\n======= Installing dynamic plugin %s', package) - # De fato faz o download plugin_path = oci_downloader.download(package) - - # Salva o digest remoto digest_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') with open(digest_path, 'w') as df: df.write(oci_downloader.digest(package)) - # Remove duplicatas do plugin_path_by_hash que apontem p/ mesmo plugin_path + # Remove duplicatas duplicates = [k for k, v in plugin_path_by_hash.items() if v == plugin_path] for dup in duplicates: plugin_path_by_hash.pop(dup) except Exception as e: - raise InstallException(f"Error while adding OCI plugin {package} to downloader: {e}") + raise InstallException(f"Error while adding OCI plugin {package}: {e}") else: - # Instala plugin via NPM - plugin_already_installed = False - + # NPM plugin + already_installed = False if plugin['hash'] in plugin_path_by_hash: force_dl = plugin.get('forceDownload', False) if pull_policy == PullPolicy.ALWAYS or force_dl: - logging.info('\n======= Forcing download of installed dynamic plugin %s', package) + logging.info('\n======= Forcing download of installed plugin %s', package) else: - logging.info('\n======= Skipping download of installed dynamic plugin %s', package) - plugin_already_installed = True + logging.info('\n======= Skipping download of installed plugin %s', package) + already_installed = True plugin_path_by_hash.pop(plugin['hash']) else: logging.info('\n======= Installing dynamic plugin %s', package) - if plugin_already_installed: - # apenas faz merge de config, se houver + if already_installed: globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) continue - # Verifica se local => se for, pula check de integridade package_is_local = package.startswith('./') - if (not package_is_local) and (not skipIntegrityCheck) and ('integrity' not in 
plugin): - raise InstallException(f"No integrity hash provided for Package {package}") + if (not package_is_local) and (not skipIntegrityCheck) and 'integrity' not in plugin: + raise InstallException(f"No integrity hash for {package}") if package_is_local: package = os.path.join(os.getcwd(), package[2:]) @@ -693,27 +673,28 @@ def main(): logging.info('\t==> Grabbing package archive through `npm pack`') completed = subprocess.run( ['npm', 'pack', package], - capture_output=True, cwd=dynamicPluginsRoot, text=True + cwd=dynamicPluginsRoot, + capture_output=True, + text=True ) if completed.returncode != 0: raise InstallException( - f"Error while installing plugin {package} with 'npm pack': {completed.stderr}" + f"Error installing plugin {package}: {completed.stderr}" ) archive = os.path.join(dynamicPluginsRoot, completed.stdout.strip()) - # Verifica integridade se aplicável if not package_is_local and not skipIntegrityCheck: logging.info('\t==> Verifying package integrity') verify_package_integrity(plugin, archive) directory = archive.replace('.tgz', '') - directoryRealpath = os.path.realpath(directory) - plugin_path = os.path.basename(directoryRealpath) + directory_realpath = os.path.realpath(directory) + plugin_path = os.path.basename(directory_realpath) if os.path.exists(directory): logging.info('\t==> Removing previous plugin directory %s', directory) - shutil.rmtree(directory, ignore_errors=True, onerror=None) + shutil.rmtree(directory, ignore_errors=True) os.mkdir(directory) logging.info('\t==> Extracting package archive %s', archive) @@ -722,11 +703,10 @@ def main(): if member.isreg(): if not member.name.startswith('package/'): raise InstallException( - "NPM package archive doesn't start with 'package/': " + member.name + f"NPM package archive doesn't start with 'package/': {member.name}" ) if member.size > maxEntrySize: raise InstallException('Zip bomb detected in ' + member.name) - member.name = member.name.removeprefix('package/') f.extract(member, 
path=directory, filter='tar') elif member.isdir(): @@ -734,22 +714,18 @@ def main(): elif member.islnk() or member.issym(): if not member.linkpath.startswith('package/'): raise InstallException( - f'NPM package archive link outside of archive: {member.name} -> {member.linkpath}' + f"NPM package link outside: {member.name} -> {member.linkpath}" ) - member.name = member.name.removeprefix('package/') member.linkpath = member.linkpath.removeprefix('package/') - realpath = os.path.realpath( - os.path.join(directory, *os.path.split(member.linkname)) - ) - if not realpath.startswith(directoryRealpath): + rp = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) + if not rp.startswith(directory_realpath): raise InstallException( - f'NPM package archive link outside of the archive: {member.name} -> {member.linkpath}' + f"NPM package link escapes archive: {member.name} -> {member.linkpath}" ) f.extract(member, path=directory, filter='tar') else: - # Se for CHRTYPE, BLKTYPE, FIFOTYPE ou outro t_str = 'unknown' if member.type == tarfile.CHRTYPE: t_str = 'character device' @@ -758,40 +734,37 @@ def main(): elif member.type == tarfile.FIFOTYPE: t_str = 'FIFO' raise InstallException( - f'NPM package archive contains a non-regular file: {member.name} - {t_str}' + f'Archive has a non-regular file: {member.name} - {t_str}' ) logging.info('\t==> Removing package archive %s', archive) os.remove(archive) - # Cria arquivo de hash no plugin - plugin_hash_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') - with open(plugin_hash_path, 'w') as df: - df.write(plugin['hash']) + # Cria arquivo de hash + hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') + with open(hash_file_path, 'w') as hf: + hf.write(plugin['hash']) - # Se não há pluginConfig, já finaliza + # Se não tem pluginConfig, acabou if 'pluginConfig' not in plugin: logging.info('\t==> Successfully installed dynamic plugin %s', package) 
continue - # Faz merge de config do plugin globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) logging.info('\t==> Successfully installed dynamic plugin %s', package) - # Salva config global no final + # Salva config final yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w')) - # Remove plugins que ficaram sem hash no path - for old_hash in plugin_path_by_hash: - old_plugin_dir = plugin_path_by_hash[old_hash] - plugin_directory = os.path.join(dynamicPluginsRoot, old_plugin_dir) - logging.info('\n======= Removing previously installed dynamic plugin %s', old_plugin_dir) - shutil.rmtree(plugin_directory, ignore_errors=True, onerror=None) + # Remove plugins não mencionados + for old_hash, old_dir in plugin_path_by_hash.items(): + logging.info('\n======= Removing previously installed dynamic plugin %s', old_dir) + plugin_dir = os.path.join(dynamicPluginsRoot, old_dir) + shutil.rmtree(plugin_dir, ignore_errors=True) - # Exibe tempo total end_time = datetime.now() - elapsed_time = end_time - start_time - print(f"Total Execution Timeeeee: {elapsed_time}") + print(f"Total Execution Timeex: {end_time - start_time}") + if __name__ == "__main__": main() From a4f496e1e150729765b74d049aa2dbe2ab342753 Mon Sep 17 00:00:00 2001 From: Gustavo Lira Date: Tue, 18 Mar 2025 10:26:48 -0300 Subject: [PATCH 18/18] test --- docker/install-dynamic-plugins.py | 536 +++++++++++++----------------- 1 file changed, 239 insertions(+), 297 deletions(-) diff --git a/docker/install-dynamic-plugins.py b/docker/install-dynamic-plugins.py index 130eb65a96..147b43e788 100755 --- a/docker/install-dynamic-plugins.py +++ b/docker/install-dynamic-plugins.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """ -Script para instalar plugins dinâmicos a partir de imagens OCI ou pacotes NPM. -Otimizado para contêineres Kubernetes com foco em confiabilidade e diagnóstico. 
+Script para instalar plugins dinâmicos a partir de imagens OCI ou pacotes NPM, +agora com instalação paralela via ThreadPoolExecutor. """ import base64 @@ -23,8 +23,9 @@ from enum import StrEnum from functools import lru_cache from pathlib import Path -from typing import Dict, List, Any, Optional, Set, Tuple, Union +from typing import Dict, List, Any, Optional, Tuple, Union import atexit +from concurrent.futures import ThreadPoolExecutor, as_completed # ------------------------------------------------------------------------------ # Configuração de Logging @@ -68,27 +69,21 @@ def __init__(self, log_dir=None): self.info(f"Log file created at: {self.log_file}") except Exception as e: - # Falha silenciosamente mas continua com logging no console self.logger.warning(f"Could not set up file logging: {e}") def info(self, msg, *args, **kwargs): - """Log com nível INFO.""" self.logger.info(msg, *args, **kwargs) def warning(self, msg, *args, **kwargs): - """Log com nível WARNING.""" self.logger.warning(msg, *args, **kwargs) def error(self, msg, *args, **kwargs): - """Log com nível ERROR.""" self.logger.error(msg, *args, **kwargs) def debug(self, msg, *args, **kwargs): - """Log com nível DEBUG.""" self.logger.debug(msg, *args, **kwargs) def critical(self, msg, *args, **kwargs): - """Log com nível CRITICAL.""" self.logger.critical(msg, *args, **kwargs) def log_system_info(self): @@ -98,7 +93,6 @@ def log_system_info(self): self.info(f" Hostname: {os.environ.get('HOSTNAME', 'unknown')}") self.info(f" Time: {datetime.now().isoformat()}") - # Informações específicas do Kubernetes se disponíveis if os.path.exists('/var/run/secrets/kubernetes.io/serviceaccount'): try: with open('/var/run/secrets/kubernetes.io/serviceaccount/namespace', 'r') as f: @@ -107,7 +101,6 @@ def log_system_info(self): except Exception: pass - # Informações de sistema try: import platform self.info(f" Python version: {platform.python_version()}") @@ -127,7 +120,6 @@ def log_execution_result(self, 
success=True, error=None): if self.log_file: self.info(f"Full logs available at: {self.log_file}") -# Inicializa logger global para uso durante importação do módulo logger = OptimizedLogger() # ------------------------------------------------------------------------------ @@ -142,7 +134,6 @@ class InstallException(Exception): """Exceção base para erros neste script.""" pass -# Algoritmos de hash suportados para verificação de integridade RECOGNIZED_ALGORITHMS = frozenset(['sha512', 'sha384', 'sha256']) # ------------------------------------------------------------------------------ @@ -150,10 +141,6 @@ class InstallException(Exception): # ------------------------------------------------------------------------------ def merge(source: Dict[str, Any], destination: Dict[str, Any], prefix: str = '') -> Dict[str, Any]: - """ - Faz merge recursivo do dicionário 'source' em 'destination'. - Se encontrar chave com valor conflitante, lança InstallException. - """ for key, value in source.items(): if isinstance(value, dict): node = destination.setdefault(key, {}) @@ -167,46 +154,33 @@ def merge(source: Dict[str, Any], destination: Dict[str, Any], prefix: str = '') return destination def maybe_merge_config(config: Optional[Dict[str, Any]], global_config: Dict[str, Any]) -> Dict[str, Any]: - """ - Se 'config' for dict, faz merge no 'globalConfig'; caso contrário, retorna 'globalConfig' inalterado. - """ if config is not None and isinstance(config, dict): logger.info('\t==> Merging plugin-specific configuration') return merge(config, global_config) return global_config -def check_prerequisites() -> Tuple[str, ...]: - """ - Verifica e retorna o caminho de ferramentas necessárias. - Lança InstallException se alguma não estiver disponível. 
- """ +def check_prerequisites() -> Dict[str, str]: required_tools = { 'skopeo': "Skopeo is required for OCI image handling", 'npm': "NPM is required for NPM package handling" } + found = {} missing = [] - found_tools = {} - - for tool, description in required_tools.items(): + for tool, desc in required_tools.items(): path = shutil.which(tool) if path: - found_tools[tool] = path + found[tool] = path else: - missing.append(f"- {tool}: {description}") - + missing.append(f"- {tool}: {desc}") if missing: - raise InstallException(f"Required tools not found in PATH:\n" + "\n".join(missing)) - - return found_tools + raise InstallException("Required tools not found:\n" + "\n".join(missing)) + return found # ------------------------------------------------------------------------------ # Funções de Lock # ------------------------------------------------------------------------------ def create_lock(lock_file_path: Union[str, Path]): - """ - Cria arquivo de lock. Se já existir, aguarda até ele ser liberado. - """ lock_path = Path(lock_file_path) while True: try: @@ -217,36 +191,25 @@ def create_lock(lock_file_path: Union[str, Path]): wait_for_lock_release(lock_path) def remove_lock(lock_file_path: Union[str, Path]): - """ - Remove o lock file, se existir. - """ lock_path = Path(lock_file_path) if lock_path.exists(): try: lock_path.unlink() logger.info(f"======= Removed lock file: {lock_path}") except OSError as e: - logger.warning(f"======= Failed to remove lock file: {e}") + logger.warning(f"Failed to remove lock file: {e}") -def wait_for_lock_release(lock_file_path: Union[str, Path]): - """ - Fica em loop até o arquivo de lock ser removido, com detecção de locks obsoletos. 
- """ - lock_path = Path(lock_file_path) +def wait_for_lock_release(lock_path: Path): logger.info(f"======= Waiting for lock release (file: {lock_path})...") - start_time = time.time() - timeout = 300 # 5 minutos + timeout = 300 # 5 minutos de timeout while lock_path.exists(): time.sleep(1) - - # Verificar timeout if time.time() - start_time > timeout: - logger.warning(f"Lock wait timeout after {timeout}s - may be stale, removing") + logger.warning(f"Lock wait timed out after {timeout}s - removing stale lock.") remove_lock(lock_path) break - logger.info("======= Lock released.") # ------------------------------------------------------------------------------ @@ -254,21 +217,15 @@ def wait_for_lock_release(lock_file_path: Union[str, Path]): # ------------------------------------------------------------------------------ def load_yaml(file_path: Union[str, Path]) -> Optional[Any]: - """ - Carrega o conteúdo YAML de 'file_path'. - Retorna None se o arquivo não existir. - Lança InstallException em caso de erros de parsing. - """ - path = Path(file_path) - if not path.is_file(): - logger.warning(f"File not found: {path}") + p = Path(file_path) + if not p.is_file(): + logger.warning(f"File not found: {p}") return None - try: - with path.open('r') as file: - return yaml.safe_load(file) + with p.open('r') as f: + return yaml.safe_load(f) except yaml.YAMLError as e: - raise InstallException(f"Error parsing YAML file {path}: {e}") + raise InstallException(f"Error parsing YAML file {p}: {e}") # ------------------------------------------------------------------------------ # OCI Downloader com otimizações @@ -276,27 +233,17 @@ def load_yaml(file_path: Union[str, Path]) -> Optional[Any]: class OciDownloader: def __init__(self, destination: Union[str, Path], tools: Dict[str, str]): - """ - Inicializa o OciDownloader com ferramentas validadas. 
- - Args: - destination: Diretório onde os plugins serão extraídos - tools: Dicionário com caminhos para ferramentas necessárias - """ self._skopeo = tools.get('skopeo') if not self._skopeo: - raise InstallException('skopeo executable not found in PATH') + raise InstallException('skopeo not in PATH') self.tmp_dir_obj = tempfile.TemporaryDirectory() self.tmp_dir = Path(self.tmp_dir_obj.name) - self.image_to_tarball: Dict[str, Path] = {} + self.image_to_tarball = {} self.destination = Path(destination) - self._digest_cache: Dict[str, str] = {} + self._digest_cache = {} def skopeo(self, command: List[str]) -> str: - """ - Executa 'skopeo' com os argumentos especificados e retorna stdout. - """ try: result = subprocess.run( [self._skopeo] + command, @@ -306,21 +253,18 @@ def skopeo(self, command: List[str]) -> str: ) return result.stdout except subprocess.CalledProcessError as e: - error_msg = f'Error while running skopeo command: {e.stderr}' - logger.error(error_msg) - raise InstallException(error_msg) + msg = f"Error running skopeo: {e.stderr}" + logger.error(msg) + raise InstallException(msg) def get_plugin_tar(self, image: str) -> Path: - """ - Faz o download da imagem (se ainda não feito), usando skopeo, e retorna o caminho local ao tar. 
- """ if image in self.image_to_tarball: return self.image_to_tarball[image] logger.info(f'\t==> Copying image {image} to local filesystem') - image_digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() + digest = hashlib.sha256(image.encode('utf-8'), usedforsecurity=False).hexdigest() + local_dir = self.tmp_dir / digest - local_dir = self.tmp_dir / image_digest image_url = image.replace('oci://', 'docker://') self.skopeo(['copy', image_url, f'dir:{local_dir}']) @@ -328,164 +272,112 @@ def get_plugin_tar(self, image: str) -> Path: with manifest_path.open('r') as f: manifest = json.load(f) - layer_digest = manifest['layers'][0]['digest'] - _, filename = layer_digest.split(':') - local_path = local_dir / filename + layer_digest = manifest['layers'][0]['digest'].split(':')[1] + local_path = local_dir / layer_digest self.image_to_tarball[image] = local_path return local_path - def extract_plugin(self, tar_file: Path, plugin_path: str) -> None: - """ - Extrai apenas arquivos que começam com 'plugin_path' do tar.gz, - verificando tamanho (anti zip-bomb) e possíveis links fora do escopo. 
- """ + def extract_plugin(self, tar_file: Path, plugin_path: str): extracted_path = self.destination.absolute() max_size = int(os.environ.get('MAX_ENTRY_SIZE', 20000000)) with tarfile.open(tar_file, 'r:gz') as tar: - members_to_extract = [] + members = [] for member in tar.getmembers(): if not member.name.startswith(plugin_path): continue - if member.size > max_size: - raise InstallException(f'Zip bomb detected in {member.name}') - + raise InstallException(f'Zip bomb in {member.name}') if member.islnk() or member.issym(): - realpath = Path(extracted_path / plugin_path).joinpath( - *Path(member.linkname).parts).resolve() - + realpath = (extracted_path / plugin_path).joinpath(*Path(member.linkname).parts).resolve() if not str(realpath).startswith(str(extracted_path)): logger.warning( - f'\t==> WARNING: skipping file containing link outside of the archive: ' - f'{member.name} -> {member.linkpath}' + f'\t==> WARNING: skipping symlink outside: {member.name} -> {member.linkpath}' ) continue - - members_to_extract.append(member) - - # Extração em batch é mais eficiente - if members_to_extract: - tar.extractall(extracted_path, members=members_to_extract, filter='tar') + members.append(member) + tar.extractall(extracted_path, members=members, filter='tar') def download(self, package: str) -> str: - """ - Recebe algo como 'oci://repo/img!path_no_tar' e extrai só o diretório path_no_tar no destino. - Retorna plugin_path. 
- """ image, plugin_path = package.split('!') - tar_file = self.get_plugin_tar(image) + tar_path = self.get_plugin_tar(image) - plugin_directory = self.destination / plugin_path - if plugin_directory.exists(): - logger.info(f'\t==> Removing previous plugin directory {plugin_directory}') - try: - shutil.rmtree(plugin_directory) - except OSError as e: - logger.warning(f'\t==> Could not remove directory: {e}, trying alternative method') - # Tentar removê-lo forçadamente se o método normal falhar - try: - os.system(f'rm -rf "{plugin_directory}"') - except Exception as e2: - raise InstallException(f"Failed to remove directory {plugin_directory}: {e2}") + plugin_dir = self.destination / plugin_path + if plugin_dir.exists(): + logger.info(f'\t==> Removing previous plugin directory {plugin_dir}') + shutil.rmtree(plugin_dir, ignore_errors=True) - self.extract_plugin(tar_file=tar_file, plugin_path=plugin_path) + self.extract_plugin(tar_path, plugin_path) return plugin_path def digest(self, package: str) -> str: - """ - Retorna o digest da imagem OCI usando 'skopeo inspect' com cache. - """ image, _ = package.split('!') - - # Verificar cache primeiro if image in self._digest_cache: return self._digest_cache[image] + image_url = image.replace('oci://', 'docker://') + output = self.skopeo(['inspect', image_url]) + data = json.loads(output) + result = data['Digest'].split(':')[1] + self._digest_cache[image] = result + return result # ------------------------------------------------------------------------------ -# Verificação de Integridade +# Verificação de Integridade com hashlib (sem openssl) # ------------------------------------------------------------------------------ -def verify_package_integrity(plugin: dict, archive: Union[str, Path], logger=None) -> None: - """ - Verifica a integridade do pacote usando algoritmos de hash nativos do Python. 
- - Args: - plugin: Dicionário contendo informações do plugin, incluindo a chave 'integrity' - archive: Caminho para o arquivo a ser verificado - logger: Objeto logger para logging (opcional) - - Raises: - InstallException: Se a verificação de integridade falhar ou parâmetros inválidos - """ - # Use o logger global se nenhum for fornecido - log = logger or globals().get('logger', logging) + +def verify_package_integrity(plugin: dict, archive: Union[str, Path]): package = plugin['package'] - # Verificar se a chave 'integrity' existe integrity = plugin.get('integrity') if not integrity: raise InstallException(f'Package integrity for {package} is missing') - if not isinstance(integrity, str): raise InstallException(f'Package integrity for {package} must be a string') - # Analisar a string de integridade parts = integrity.split('-') if len(parts) != 2: raise InstallException( - f'Package integrity for {package} must be -' + f'Integrity must be - for {package}' ) - algorithm, hash_digest = parts - - # Verificar se o algoritmo é suportado + algorithm, b64_digest = parts if algorithm not in RECOGNIZED_ALGORITHMS: raise InstallException( - f'{package}: Algorithm {algorithm} not supported. Use one of: {", ".join(RECOGNIZED_ALGORITHMS)}' + f'{package}: Provided algorithm {algorithm} not supported. 
' + f'Use one of: {RECOGNIZED_ALGORITHMS}' ) - # Verificar se o hash é base64 válido try: - base64.b64decode(hash_digest, validate=True) + base64.b64decode(b64_digest, validate=True) except binascii.Error: - raise InstallException( - f'{package}: Provided Package integrity hash {hash_digest} is not valid base64' - ) + raise InstallException(f'{package}: Invalid base64: {b64_digest}') - # Mapear algoritmos para funções do hashlib - hash_algorithms = { + # Mapear algoritmo + import hashlib + hash_map = { 'sha256': hashlib.sha256, 'sha384': hashlib.sha384, 'sha512': hashlib.sha512 } + hasher = hash_map[algorithm]() - # Calcular hash do arquivo - log.info(f'\t==> Verifying {algorithm} integrity of {Path(archive).name}') - - try: - hasher = hash_algorithms[algorithm]() - with open(archive, 'rb') as f: - # Ler em chunks para evitar carregar todo o arquivo na memória - for chunk in iter(lambda: f.read(65536), b''): - hasher.update(chunk) - - calculated = base64.b64encode(hasher.digest()).decode('utf-8') - - if hash_digest != calculated: - raise InstallException( - f'{package}: Hash mismatch.\n' - f'Expected: {hash_digest}\n' - f'Got: {calculated}' - ) + with open(archive, 'rb') as f: + for chunk in iter(lambda: f.read(65536), b''): + hasher.update(chunk) - log.info(f'\t==> Integrity verification passed') - except IOError as e: - raise InstallException(f"Failed to read file {archive}: {e}") + calculated = base64.b64encode(hasher.digest()).decode('utf-8') + if calculated != b64_digest: + raise InstallException( + f'{package}: integrity check failed. 
' + f'Expected={b64_digest}, Got={calculated}' + ) + logger.info(f'\t==> Integrity check passed for {package}') # ------------------------------------------------------------------------------ -# Função Principal +# Função principal com paralelismo # ------------------------------------------------------------------------------ + def main(): start_time = datetime.now() @@ -506,7 +398,7 @@ def main(): dynamicPluginsGlobalConfigFile = os.path.join(dynamicPluginsRoot, 'app-config.dynamic-plugins.yaml') if not os.path.isfile(dynamicPluginsFile): - logging.info(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") + logger.info(f"No {dynamicPluginsFile} file found. Skipping dynamic plugins installation.") with open(dynamicPluginsGlobalConfigFile, 'w') as f: f.write('') sys.exit(0) @@ -519,7 +411,7 @@ def main(): content = load_yaml(dynamicPluginsFile) if not content: - logging.info(f"{dynamicPluginsFile} is empty or invalid. Skipping installation.") + logger.info(f"{dynamicPluginsFile} is empty or invalid. 
Skipping installation.") with open(dynamicPluginsGlobalConfigFile, 'w') as f: f.write('') sys.exit(0) @@ -528,7 +420,7 @@ def main(): raise InstallException(f"{dynamicPluginsFile} must be a YAML object") if skipIntegrityCheck: - logging.info(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping integrity checks") + logger.info(f"SKIP_INTEGRITY_CHECK={skipIntegrityCheck}, skipping integrity checks") includes = content.get('includes', []) if not isinstance(includes, list): @@ -538,12 +430,11 @@ def main(): for include in includes: if not isinstance(include, str): raise InstallException(f"'includes' must be a list of strings in {dynamicPluginsFile}") - logging.info('\n======= Including dynamic plugins from %s', include) + logger.info('\n======= Including dynamic plugins from %s', include) includeContent = load_yaml(include) if not includeContent: continue - if not isinstance(includeContent, dict): raise InstallException(f"{include} must be a YAML object") @@ -559,47 +450,56 @@ def main(): if not isinstance(plugins, list): raise InstallException(f"'plugins' must be a list in {dynamicPluginsFile}") + # Override for plugin in plugins: package = plugin['package'] - if not isinstance(package, str): - raise InstallException(f"'plugins.package' must be a string") if package in allPlugins: - logging.info('\n======= Overriding dynamic plugin configuration %s', package) + logger.info('\n======= Overriding dynamic plugin configuration %s', package) for k, v in plugin.items(): if k != 'package': allPlugins[package][k] = v else: allPlugins[package] = plugin - # Calcula hash + # Gera hash for plugin in allPlugins.values(): hash_dict = copy.deepcopy(plugin) hash_dict.pop('pluginConfig', None) h = hashlib.sha256(json.dumps(hash_dict, sort_keys=True).encode('utf-8')).hexdigest() plugin['hash'] = h - # Lê diretorios instalados + # Lê instalados plugin_path_by_hash = {} for dir_name in os.listdir(dynamicPluginsRoot): dir_path = os.path.join(dynamicPluginsRoot, dir_name) if 
os.path.isdir(dir_path):
-            hash_file = os.path.join(dir_path, 'dynamic-plugin-config.hash')
-            if os.path.isfile(hash_file):
-                with open(hash_file, 'r') as hf:
-                    old_hash = hf.read().strip()
+            hf = os.path.join(dir_path, 'dynamic-plugin-config.hash')
+            if os.path.isfile(hf):
+                with open(hf, 'r') as hf2:
+                    old_hash = hf2.read().strip()
                 plugin_path_by_hash[old_hash] = dir_name
 
     tools = check_prerequisites()
     oci_downloader = OciDownloader(dynamicPluginsRoot, tools)
 
-    # Instala cada plugin
-    for plugin in allPlugins.values():
-        package = plugin['package']
-
-        if plugin.get('disabled'):
-            logging.info('\n======= Skipping disabled dynamic plugin %s', package)
-            continue
+    # Filter active plugins
+    active_plugins = []
+    for p in allPlugins.values():
+        if not p.get('disabled'):
+            active_plugins.append(p)
+        else:
+            logger.info('\n======= Skipping disabled dynamic plugin %s', p['package'])
+    # -----------
+    # Step 1: helper that installs a single plugin (invoked in parallel)
+    # -----------
+    def install_one_plugin(plugin):
+        """
+        Returns (plugin, installed_path, error_or_None).
+        On error, installed_path is None, and vice versa.
+        """
+        package = plugin['package']
+        plugin_hash = plugin['hash']
         pull_policy = plugin.get(
             'pullPolicy',
             PullPolicy.ALWAYS if ':latest!'
in package else PullPolicy.IF_NOT_PRESENT @@ -607,17 +507,20 @@ def main(): if isinstance(pull_policy, str): pull_policy = PullPolicy(pull_policy) + installed_path = None + + # Se OCI if package.startswith('oci://'): - # Instala via OCI try: - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: - logging.info('\n======= Skipping download of installed plugin %s', package) - plugin_path_by_hash.pop(plugin['hash']) - globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) - continue - - if plugin['hash'] in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: - old_dir = plugin_path_by_hash.pop(plugin['hash']) + # If already installed & policy=IF_NOT_PRESENT => skip + if plugin_hash in plugin_path_by_hash and pull_policy == PullPolicy.IF_NOT_PRESENT: + logger.info('\n======= Skipping download of installed plugin %s', package) + plugin_path_by_hash.pop(plugin_hash) + return (plugin, None, None) + + # If already installed & policy=ALWAYS => check digest + if plugin_hash in plugin_path_by_hash and pull_policy == PullPolicy.ALWAYS: + old_dir = plugin_path_by_hash.pop(plugin_hash) old_digest_file = os.path.join(dynamicPluginsRoot, old_dir, 'dynamic-plugin-image.hash') local_digest = None if os.path.isfile(old_digest_file): @@ -625,52 +528,52 @@ def main(): local_digest = df.read().strip() remote_digest = oci_downloader.digest(package) if remote_digest == local_digest: - logging.info('\n======= Skipping download (same digest) %s', package) - globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) - continue + logger.info('\n======= Skipping download (same digest) %s', package) + return (plugin, None, None) else: - logging.info('\n======= Installing dynamic plugin %s', package) + logger.info('\n======= Installing dynamic plugin %s', package) else: - logging.info('\n======= Installing dynamic plugin %s', package) + logger.info('\n======= Installing dynamic plugin %s', package) - plugin_path = 
oci_downloader.download(package) - digest_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-image.hash') + installed_path = oci_downloader.download(package) + digest_path = os.path.join(dynamicPluginsRoot, installed_path, 'dynamic-plugin-image.hash') with open(digest_path, 'w') as df: df.write(oci_downloader.digest(package)) # Remove duplicatas - duplicates = [k for k, v in plugin_path_by_hash.items() if v == plugin_path] + duplicates = [k for k, v in plugin_path_by_hash.items() if v == installed_path] for dup in duplicates: plugin_path_by_hash.pop(dup) + except Exception as e: - raise InstallException(f"Error while adding OCI plugin {package}: {e}") + return (plugin, None, f"Error while adding OCI plugin {package}: {e}") else: - # NPM plugin - already_installed = False - if plugin['hash'] in plugin_path_by_hash: + # NPM + plugin_already_installed = False + if plugin_hash in plugin_path_by_hash: force_dl = plugin.get('forceDownload', False) if pull_policy == PullPolicy.ALWAYS or force_dl: - logging.info('\n======= Forcing download of installed plugin %s', package) + logger.info('\n======= Forcing download of installed plugin %s', package) else: - logging.info('\n======= Skipping download of installed plugin %s', package) - already_installed = True - plugin_path_by_hash.pop(plugin['hash']) + logger.info('\n======= Skipping download of installed plugin %s', package) + plugin_already_installed = True + plugin_path_by_hash.pop(plugin_hash) else: - logging.info('\n======= Installing dynamic plugin %s', package) + logger.info('\n======= Installing dynamic plugin %s', package) - if already_installed: - globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) - continue + if plugin_already_installed: + # skip + return (plugin, None, None) package_is_local = package.startswith('./') if (not package_is_local) and (not skipIntegrityCheck) and 'integrity' not in plugin: - raise InstallException(f"No integrity hash for {package}") + return 
(plugin, None, f"No integrity hash for {package}") if package_is_local: package = os.path.join(os.getcwd(), package[2:]) - logging.info('\t==> Grabbing package archive through `npm pack`') + logger.info('\t==> Grabbing package archive through `npm pack`') completed = subprocess.run( ['npm', 'pack', package], cwd=dynamicPluginsRoot, @@ -678,89 +581,128 @@ def main(): text=True ) if completed.returncode != 0: - raise InstallException( - f"Error installing plugin {package}: {completed.stderr}" - ) + return (plugin, None, f"Error installing plugin {package}: {completed.stderr}") archive = os.path.join(dynamicPluginsRoot, completed.stdout.strip()) - if not package_is_local and not skipIntegrityCheck: - logging.info('\t==> Verifying package integrity') - verify_package_integrity(plugin, archive) + if (not package_is_local) and (not skipIntegrityCheck): + logger.info('\t==> Verifying package integrity') + try: + verify_package_integrity(plugin, archive) + except Exception as e: + return (plugin, None, f"Integrity check failed for {package}: {e}") directory = archive.replace('.tgz', '') directory_realpath = os.path.realpath(directory) - plugin_path = os.path.basename(directory_realpath) + installed_path = os.path.basename(directory_realpath) if os.path.exists(directory): - logging.info('\t==> Removing previous plugin directory %s', directory) + logger.info('\t==> Removing previous plugin directory %s', directory) shutil.rmtree(directory, ignore_errors=True) os.mkdir(directory) - logging.info('\t==> Extracting package archive %s', archive) - with tarfile.open(archive, 'r:gz') as f: - for member in f.getmembers(): - if member.isreg(): - if not member.name.startswith('package/'): - raise InstallException( - f"NPM package archive doesn't start with 'package/': {member.name}" - ) - if member.size > maxEntrySize: - raise InstallException('Zip bomb detected in ' + member.name) - member.name = member.name.removeprefix('package/') - f.extract(member, path=directory, filter='tar') - 
elif member.isdir(): - logging.info('\t\tSkipping directory entry %s', member.name) - elif member.islnk() or member.issym(): - if not member.linkpath.startswith('package/'): - raise InstallException( - f"NPM package link outside: {member.name} -> {member.linkpath}" - ) - member.name = member.name.removeprefix('package/') - member.linkpath = member.linkpath.removeprefix('package/') - - rp = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) - if not rp.startswith(directory_realpath): + logger.info('\t==> Extracting package archive %s', archive) + try: + with tarfile.open(archive, 'r:gz') as f: + for member in f.getmembers(): + if member.isreg(): + if not member.name.startswith('package/'): + raise InstallException( + f"NPM package archive doesn't start with 'package/': {member.name}" + ) + if member.size > maxEntrySize: + raise InstallException('Zip bomb detected in ' + member.name) + member.name = member.name.removeprefix('package/') + f.extract(member, path=directory, filter='tar') + elif member.isdir(): + logger.info('\t\tSkipping directory entry %s', member.name) + elif member.islnk() or member.issym(): + if not member.linkpath.startswith('package/'): + raise InstallException( + f"NPM package link outside: {member.name} -> {member.linkpath}" + ) + member.name = member.name.removeprefix('package/') + member.linkpath = member.linkpath.removeprefix('package/') + + rp = os.path.realpath(os.path.join(directory, *os.path.split(member.linkname))) + if not rp.startswith(directory_realpath): + raise InstallException( + f"NPM package link escapes archive: {member.name} -> {member.linkpath}" + ) + f.extract(member, path=directory, filter='tar') + else: + t_str = 'unknown' + if member.type == tarfile.CHRTYPE: + t_str = 'character device' + elif member.type == tarfile.BLKTYPE: + t_str = 'block device' + elif member.type == tarfile.FIFOTYPE: + t_str = 'FIFO' raise InstallException( - f"NPM package link escapes archive: {member.name} -> 
{member.linkpath}" + f'Archive has a non-regular file: {member.name} - {t_str}' ) - f.extract(member, path=directory, filter='tar') - else: - t_str = 'unknown' - if member.type == tarfile.CHRTYPE: - t_str = 'character device' - elif member.type == tarfile.BLKTYPE: - t_str = 'block device' - elif member.type == tarfile.FIFOTYPE: - t_str = 'FIFO' - raise InstallException( - f'Archive has a non-regular file: {member.name} - {t_str}' - ) + except Exception as ex: + return (plugin, None, str(ex)) - logging.info('\t==> Removing package archive %s', archive) + logger.info('\t==> Removing package archive %s', archive) os.remove(archive) # Cria arquivo de hash - hash_file_path = os.path.join(dynamicPluginsRoot, plugin_path, 'dynamic-plugin-config.hash') - with open(hash_file_path, 'w') as hf: - hf.write(plugin['hash']) - - # Se não tem pluginConfig, acabou - if 'pluginConfig' not in plugin: - logging.info('\t==> Successfully installed dynamic plugin %s', package) - continue - - globalConfig = maybe_merge_config(plugin.get('pluginConfig'), globalConfig) - logging.info('\t==> Successfully installed dynamic plugin %s', package) + if installed_path: + hash_file_path = os.path.join(dynamicPluginsRoot, installed_path, 'dynamic-plugin-config.hash') + with open(hash_file_path, 'w') as hf: + hf.write(plugin_hash) + + return (plugin, installed_path, None) + + # ----------- + # Passo 2: instalar plugins em paralelo + # ----------- + from concurrent.futures import ThreadPoolExecutor, as_completed + results = [] + exceptions = [] + + # Ajuste o max_workers conforme o ambiente + with ThreadPoolExecutor(max_workers=2) as executor: + future_map = {} + for p in active_plugins: + f = executor.submit(install_one_plugin, p) + future_map[f] = p['package'] + + for f in as_completed(future_map): + pkg = future_map[f] + try: + plugin_obj, installed_path, error = f.result() + if error: + exceptions.append((pkg, error)) + else: + results.append((plugin_obj, installed_path)) + except Exception as e: 
+                exceptions.append((pkg, str(e)))
+
+    # If any exceptions occurred, abort
+    if exceptions:
+        for pkg, err in exceptions:
+            logger.error(f"Error installing {pkg}: {err}")
+        raise InstallException("One or more plugins failed to install in parallel")
+
+    # -----------
+    # Step 3: config merges and completion logging
+    # -----------
+    for plugin_obj, installed_path in results:
+        # If installed_path is None => the plugin was not reinstalled, but that is not an error
+        if 'pluginConfig' in plugin_obj:
+            globalConfig = maybe_merge_config(plugin_obj.get('pluginConfig'), globalConfig)
+        logger.info('\t==> Successfully installed dynamic plugin %s', plugin_obj['package'])
 
     # Salva config final
-    yaml.safe_dump(globalConfig, open(dynamicPluginsGlobalConfigFile, 'w'))
+    with open(dynamicPluginsGlobalConfigFile, 'w') as gf:
+        yaml.safe_dump(globalConfig, gf)
 
-    # Remove plugins não mencionados
+    # Plugins still left in plugin_path_by_hash no longer appear in the config
     for old_hash, old_dir in plugin_path_by_hash.items():
-        logging.info('\n======= Removing previously installed dynamic plugin %s', old_dir)
-        plugin_dir = os.path.join(dynamicPluginsRoot, old_dir)
-        shutil.rmtree(plugin_dir, ignore_errors=True)
+        logger.info('\n======= Removing previously installed dynamic plugin %s', old_dir)
+        shutil.rmtree(os.path.join(dynamicPluginsRoot, old_dir), ignore_errors=True)
 
     end_time = datetime.now()
     print(f"Total Execution Timeex: {end_time - start_time}")