Commit 1a93b60

Merge pull request #153 from ligangty/release
Merge from main branch to release
2 parents (5b0a456 + 8dbfba6); commit 1a93b60

30 files changed: +382 -141 lines

charon/cmd/command.py (+3 -2)

@@ -345,14 +345,15 @@ def delete(
     __safe_delete(tmp_dir)


-def __get_targets(target: List[str], conf: CharonConfig) -> List[Tuple[str, str, str]]:
+def __get_targets(target: List[str], conf: CharonConfig) -> List[Tuple[str, str, str, str]]:
     targets_ = []
     for tgt in target:
         aws_bucket = conf.get_aws_bucket(tgt)
         if not aws_bucket:
             continue
         prefix = conf.get_bucket_prefix(tgt)
-        targets_.append([tgt, aws_bucket, prefix])
+        registry = conf.get_bucket_registry(tgt)
+        targets_.append([tgt, aws_bucket, prefix, registry])
     if len(targets_) == 0:
         logger.error(
             "All targets are not valid or configured, "

charon/config.py (+16 -2)

@@ -20,6 +20,7 @@
 import logging

 from charon.utils.strings import remove_prefix
+from charon.constants import DEFAULT_REGISTRY

 CONFIG_FILE = "charon.yaml"

@@ -53,8 +54,8 @@ def get_aws_bucket(self, target: str) -> str:
             return None
         bucket = target_.get("bucket", None)
         if not bucket:
-            logger.error("The bucket %s is not found for target %s "
-                         "in charon configuration.")
+            logger.error("The bucket is not found for target %s "
+                         "in charon configuration.", target)
         return bucket

     def get_bucket_prefix(self, target: str) -> str:
@@ -73,6 +74,19 @@ def get_bucket_prefix(self, target: str) -> str:
         prefix = remove_prefix(prefix, "/")
         return prefix

+    def get_bucket_registry(self, target: str) -> str:
+        target_: Dict = self.__targets.get(target, None)
+        if not target_ or not isinstance(target_, Dict):
+            logger.error("The target %s is not found in charon configuration.", target)
+            return None
+        registry = target_.get("registry", None)
+        if not registry:
+            registry = DEFAULT_REGISTRY
+            logger.error("The registry is not found for target %s "
+                         "in charon configuration, so DEFAULT_REGISTRY(localhost) will be used.",
+                         target)
+        return registry
+
     def get_manifest_bucket(self) -> str:
         return self.__manifest_bucket
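
A rough standalone sketch of the lookup behaviour get_bucket_registry adds, assuming a parsed targets mapping shaped like the charon.yaml targets (the sample values mirror the test config further down in this commit):

    DEFAULT_REGISTRY = "localhost"

    targets = {
        "npm": {"bucket": "charon-test-npm", "registry": "npm1.registry.redhat.com"},
        "ea": {"bucket": "charon-test-ea", "prefix": "earlyaccess/all"},  # no registry key
    }

    def get_bucket_registry(target: str) -> str:
        target_ = targets.get(target)
        if not target_:
            return None
        # fall back to DEFAULT_REGISTRY when the target has no "registry" entry
        return target_.get("registry") or DEFAULT_REGISTRY

    assert get_bucket_registry("npm") == "npm1.registry.redhat.com"
    assert get_bucket_registry("ea") == "localhost"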

charon/constants.py (+2)

@@ -173,3 +173,5 @@
 PROD_INFO_SUFFIX = ".prodinfo"
 MANIFEST_SUFFIX = ".txt"
 DEFAULT_ERRORS_LOG = "errors.log"
+
+DEFAULT_REGISTRY = "localhost"

charon/pkgs/maven.py (+2 -2)

@@ -256,7 +256,7 @@ def handle_maven_uploading(
         prod_key: str,
         ignore_patterns=None,
         root="maven-repository",
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
@@ -418,7 +418,7 @@ def handle_maven_del(
         prod_key: str,
         ignore_patterns=None,
         root="maven-repository",
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,

charon/pkgs/npm.py (+46 -43)

@@ -30,6 +30,7 @@
 from charon.pkgs.pkg_utils import upload_post_process, rollback_post_process
 from charon.utils.strings import remove_prefix
 from charon.utils.files import write_manifest
+from charon.utils.map import del_none

 logger = logging.getLogger(__name__)

@@ -65,7 +66,7 @@ def __init__(self, metadata, is_version):
 def handle_npm_uploading(
         tarball_path: str,
         product: str,
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
@@ -86,48 +87,60 @@ def handle_npm_uploading(

     Returns the directory used for archive processing and if uploading is successful
     """
-    target_dir, valid_paths, package_metadata = _scan_metadata_paths_from_archive(
-        tarball_path, prod=product, dir__=dir_
-    )
-    if not os.path.isdir(target_dir):
-        logger.error("Error: the extracted target_dir path %s does not exist.", target_dir)
-        sys.exit(1)
-
-    valid_dirs = __get_path_tree(valid_paths, target_dir)
-
-    # main_target = targets[0]
     client = S3Client(aws_profile=aws_profile, dry_run=dry_run)
-    targets_ = [(target[1], remove_prefix(target[2], "/")) for target in targets]
-    logger.info(
-        "Start uploading files to s3 buckets: %s",
-        [target[1] for target in targets]
-    )
-    failed_files = client.upload_files(
-        file_paths=valid_paths,
-        targets=targets_,
-        product=product,
-        root=target_dir
-    )
-    logger.info("Files uploading done\n")
-
-    succeeded = True
     for target in targets:
+        bucket_ = target[1]
+        prefix__ = remove_prefix(target[2], "/")
+        registry__ = target[3]
+        target_dir, valid_paths, package_metadata = _scan_metadata_paths_from_archive(
+            tarball_path, registry__, prod=product, dir__=dir_
+        )
+        if not os.path.isdir(target_dir):
+            logger.error("Error: the extracted target_dir path %s does not exist.", target_dir)
+            sys.exit(1)
+        valid_dirs = __get_path_tree(valid_paths, target_dir)
+
+        logger.info("Start uploading files to s3 buckets: %s", bucket_)
+        failed_files = client.upload_files(
+            file_paths=[valid_paths[0]],
+            targets=[(bucket_, prefix__)],
+            product=product,
+            root=target_dir
+        )
+        logger.info("Files uploading done\n")
+
+        succeeded = True
+
         if not manifest_bucket_name:
             logger.warning(
                 'Warning: No manifest bucket is provided, will ignore the process of manifest '
                 'uploading\n')
         else:
             logger.info("Start uploading manifest to s3 bucket %s", manifest_bucket_name)
-            manifest_folder = target[1]
+            manifest_folder = bucket_
             manifest_name, manifest_full_path = write_manifest(valid_paths, target_dir, product)
+
             client.upload_manifest(
                 manifest_name, manifest_full_path,
                 manifest_folder, manifest_bucket_name
             )
             logger.info("Manifest uploading is done\n")

-        bucket_ = target[1]
-        prefix__ = remove_prefix(target[2], "/")
+        logger.info(
+            "Start generating version-level package.json for package: %s in s3 bucket %s",
+            package_metadata.name, bucket_
+        )
+        failed_metas = []
+        _version_metadata_path = valid_paths[1]
+        _failed_metas = client.upload_metadatas(
+            meta_file_paths=[_version_metadata_path],
+            target=(bucket_, prefix__),
+            product=product,
+            root=target_dir
+        )
+        failed_metas.extend(_failed_metas)
+        logger.info("version-level package.json uploading done")
+
         logger.info(
             "Start generating package.json for package: %s in s3 bucket %s",
             package_metadata.name, bucket_
@@ -137,7 +150,6 @@ def handle_npm_uploading(
         )
         logger.info("package.json generation done\n")

-        failed_metas = []
         if META_FILE_GEN_KEY in meta_files:
             _failed_metas = client.upload_metadatas(
                 meta_file_paths=[meta_files[META_FILE_GEN_KEY]],
@@ -178,7 +190,7 @@ def handle_npm_uploading(
 def handle_npm_del(
         tarball_path: str,
         product: str,
-        targets: List[Tuple[str, str, str]] = None,
+        targets: List[Tuple[str, str, str, str]] = None,
         aws_profile=None,
         dir_=None,
         do_index=True,
@@ -381,11 +393,11 @@ def _gen_npm_package_metadata_for_del(
     return meta_files


-def _scan_metadata_paths_from_archive(path: str, prod="", dir__=None) -> Tuple[str, list,
-                                                                               NPMPackageMetadata]:
+def _scan_metadata_paths_from_archive(path: str, registry: str, prod="", dir__=None) ->\
+        Tuple[str, list, NPMPackageMetadata]:
     tmp_root = mkdtemp(prefix=f"npm-charon-{prod}-", dir=dir__)
     try:
-        _, valid_paths = extract_npm_tarball(path, tmp_root, True)
+        _, valid_paths = extract_npm_tarball(path, tmp_root, True, registry)
         if len(valid_paths) > 1:
             version = _scan_for_version(valid_paths[1])
             package = NPMPackageMetadata(version, True)
@@ -502,23 +514,14 @@ def _write_package_metadata_to_file(package_metadata: NPMPackageMetadata, root='
     final_package_metadata_path = os.path.join(root, package_metadata.name, PACKAGE_JSON)
     try:
         with open(final_package_metadata_path, mode='w', encoding='utf-8') as f:
-            dump(_del_none(package_metadata.__dict__.copy()), f)
+            dump(del_none(package_metadata.__dict__.copy()), f)
         return final_package_metadata_path
     except FileNotFoundError:
         logger.error(
             'Can not create file %s because of some missing folders', final_package_metadata_path
         )


-def _del_none(d):
-    for key, value in list(d.items()):
-        if value is None:
-            del d[key]
-        elif isinstance(value, dict):
-            _del_none(value)
-    return d
-
-
 def __get_path_tree(paths: str, prefix: str) -> Set[str]:
     valid_dirs = set()
     for f in paths:
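
The refactor above moves archive scanning and uploading inside the per-target loop, so each bucket gets its tarball and version-level package.json rendered against that target's registry. A condensed sketch of that flow, with the scanner and S3 client passed in as plain stubs rather than the real charon APIs:

    def upload_per_target(tarball_path, product, targets, scan, client):
        # Illustrative only: re-scan per registry, then upload to that target's bucket.
        for name, bucket, prefix, registry in targets:
            # scanning per target lets the version metadata embed this registry's tarball URL
            target_dir, valid_paths, package_metadata = scan(tarball_path, registry, prod=product)
            # valid_paths[0] is the .tgz, valid_paths[1] the version-level package.json
            client.upload_files(file_paths=[valid_paths[0]], targets=[(bucket, prefix)],
                                product=product, root=target_dir)
            client.upload_metadatas(meta_file_paths=[valid_paths[1]], target=(bucket, prefix),
                                    product=product, root=target_dir)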

charon/storage.py (+12 -9)

@@ -383,15 +383,18 @@ async def path_upload_handler(
             # NOTE: This should not happen for most cases, as most
             # of the metadata file does not have product info. Just
             # leave for requirement change in future
-            (prods, no_error) = await self.__run_async(
-                self.__get_prod_info,
-                path_key, bucket_name
-            )
-            if not no_error:
-                failed.append(full_file_path)
-                return
-            if no_error and product not in prods:
-                prods.append(product)
+            # This is now used for npm version-level package.json
+            prods = [product]
+            if existed:
+                (prods, no_error) = await self.__run_async(
+                    self.__get_prod_info,
+                    path_key, bucket_name
+                )
+                if not no_error:
+                    failed.append(full_file_path)
+                    return
+                if no_error and product not in prods:
+                    prods.append(product)
             updated = await self.__update_prod_info(
                 path_key, bucket_name, prods
             )
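
Reduced to plain Python, the change in path_upload_handler starts the product list with just the current product and only merges with stored product info when the key already exists in the bucket. A sketch, with __get_prod_info stubbed as a callable and `existed` mirroring the existing-key check (the product names are made up):

    def resolve_prods(product, existed, get_prod_info):
        # new keys (e.g. an npm version-level package.json) start with only this product
        prods = [product]
        if existed:
            prods, no_error = get_prod_info()
            if not no_error:
                return None  # caller records the file as failed
            if product not in prods:
                prods.append(product)
        return prods

    assert resolve_prods("jquery-7.6.1", existed=False, get_prod_info=None) == ["jquery-7.6.1"]
    assert resolve_prods("jquery-7.6.1", existed=True,
                         get_prod_info=lambda: (["other-1.0"], True)) == ["other-1.0", "jquery-7.6.1"]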

charon/utils/archive.py (+28 -5)

@@ -20,10 +20,14 @@
 import requests
 import tempfile
 import shutil
+import subresource_integrity
 from enum import Enum
-from json import load, JSONDecodeError
+from json import load, JSONDecodeError, dump
 from typing import Tuple
 from zipfile import ZipFile, is_zipfile
+from charon.constants import DEFAULT_REGISTRY
+from charon.utils.files import digest, HashType
+from charon.utils.map import del_none

 logger = logging.getLogger(__name__)

@@ -42,7 +46,8 @@ def extract_zip_with_files(zf: ZipFile, target_dir: str, file_suffix: str, debug
     zf.extractall(target_dir, members=filtered)


-def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool) -> Tuple[str, list]:
+def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool, registry=DEFAULT_REGISTRY)\
+        -> Tuple[str, list]:
     """ Extract npm tarball will relocate the tgz file and metadata files.
         * Locate tar path ( e.g.: jquery/-/jquery-7.6.1.tgz or @types/jquery/-/jquery-2.2.3.tgz).
         * Locate version metadata path (e.g.: jquery/7.6.1 or @types/jquery/2.2.3).
@@ -54,7 +59,7 @@ def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool) -> Tuple[str, list]:
         tgz.extractall()
         for f in tgz:
             if f.name.endswith("package.json"):
-                parse_paths = __parse_npm_package_version_paths(f.path)
+                version_data, parse_paths = __parse_npm_package_version_paths(f.path)
                 package_name_path = parse_paths[0]
                 os.makedirs(os.path.join(target_dir, parse_paths[0]))
                 tarball_parent_path = os.path.join(target_dir, parse_paths[0], "-")
@@ -63,7 +68,11 @@ def extract_npm_tarball(path: str, target_dir: str, is_for_upload: bool) -> Tuple[str, list]:
                     target_dir, parse_paths[0], parse_paths[1]
                 )
                 valid_paths.append(os.path.join(version_metadata_parent_path, "package.json"))
+
                 if is_for_upload:
+                    tgz_relative_path = "/".join([parse_paths[0], "-", _get_tgz_name(path)])
+                    __write_npm_version_dist(path, f.path, version_data, tgz_relative_path, registry)
+
                     os.makedirs(tarball_parent_path)
                     target = os.path.join(tarball_parent_path, os.path.basename(path))
                     shutil.copyfile(path, target)
@@ -81,12 +90,26 @@ def _get_tgz_name(path: str):
     return ""


-def __parse_npm_package_version_paths(path: str) -> list:
+def __write_npm_version_dist(path: str, version_meta_extract_path: str, version_data: dict,
+                             tgz_relative_path: str, registry: str):
+    dist = dict()
+    dist["tarball"] = "".join(["https://", registry, "/", tgz_relative_path])
+    dist["shasum"] = digest(path, HashType.SHA1)
+    with open(path, "rb") as tarball:
+        tarball_data = tarball.read()
+        integrity = subresource_integrity.render(tarball_data, ['sha512'])
+        dist["integrity"] = integrity
+    version_data["dist"] = dist
+    with open(version_meta_extract_path, mode='w', encoding='utf-8') as f:
+        dump(del_none(version_data), f)
+
+
+def __parse_npm_package_version_paths(path: str) -> Tuple[dict, list]:
     try:
         with open(path, encoding='utf-8') as version_package:
             data = load(version_package)
             package_version_paths = [data['name'], data['version']]
-            return package_version_paths
+            return data, package_version_paths
     except JSONDecodeError:
         logger.error('Error: Failed to parse json!')
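
The dist block written by __write_npm_version_dist follows the npm registry convention: a tarball URL under the target registry, a SHA-1 shasum, and a subresource-integrity string. A small standard-library sketch that builds equivalent values, for readers who want to check the output without installing subresource-integrity (file names and registry below are illustrative):

    import base64
    import hashlib

    def build_dist(tarball_path: str, registry: str, tgz_relative_path: str) -> dict:
        with open(tarball_path, "rb") as f:
            data = f.read()
        return {
            "tarball": "https://" + registry + "/" + tgz_relative_path,
            "shasum": hashlib.sha1(data).hexdigest(),
            # SRI string: "sha512-" + base64 of the raw sha512 digest, which should match
            # what subresource_integrity.render(data, ['sha512']) returns in the diff above
            "integrity": "sha512-" + base64.b64encode(hashlib.sha512(data).digest()).decode(),
        }

    # e.g. build_dist("jquery-7.6.1.tgz", "npm1.registry.redhat.com",
    #                 "jquery/-/jquery-7.6.1.tgz")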

charon/utils/map.py (+7)

@@ -0,0 +1,7 @@
+def del_none(d):
+    for key, value in list(d.items()):
+        if value is None:
+            del d[key]
+        elif isinstance(value, dict):
+            del_none(value)
+    return d
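
del_none strips None-valued entries, recursing only into nested dicts (not lists), and mutates and returns the same mapping. For example:

    meta = {"name": "@babel/code-frame", "description": None,
            "dist": {"shasum": "abc123", "fileCount": None}}
    assert del_none(meta) == {"name": "@babel/code-frame", "dist": {"shasum": "abc123"}}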

requirements.txt (+1)

@@ -7,3 +7,4 @@ click==8.0.3
 requests==2.27.1
 ruamel.yaml==0.17.20
 defusedxml==0.7.1
+subresource-integrity==0.2

setup.py (+1 -1)

@@ -20,7 +20,7 @@

 from setuptools import setup, find_packages

-version = "1.1.0"
+version = "1.1.1"

 # f = open('README.md')
 # long_description = f.read().strip()

tests/base.py (+4)

@@ -50,6 +50,10 @@ def setUp(self):
     ea:
         bucket: "charon-test-ea"
         prefix: earlyaccess/all
+
+    npm:
+        bucket: "charon-test-npm"
+        registry: "npm1.registry.redhat.com"
     """
         self.prepare_config(config_base, default_config_content)

tests/commons.py (+2)

@@ -101,6 +101,8 @@
     "@babel/code-frame/-/code-frame-7.15.8.tgz",
 ]
 CODE_FRAME_META = "@babel/code-frame/package.json"
+
+CODE_FRAME_7_14_5_META = "@babel/code-frame/7.14.5/package.json"
 # For npm indexes
 CODE_FRAME_7_14_5_INDEXES = [
     "@babel/code-frame/7.14.5/index.html",
