diff --git a/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java b/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java
index f7baaaa..d56b8a1 100644
--- a/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java
+++ b/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java
@@ -31,4 +31,7 @@ public interface PreSeedConfig
 
     @WithName( "not-used-days-cleanup" )
     Optional<Long> notUsedDaysCleanup();
+
+    @WithName( "thread-multiplier" )
+    Optional<Integer> threadMultiplier();
 }
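Note: the new thread-multiplier key binds through the same SmallRye Config pattern as the surrounding properties. As a minimal standalone sketch (the interface name here is illustrative, not taken from this change), an Optional mapping resolves from application.yaml and lets call sites choose the fallback when the key is absent:

    import io.smallrye.config.ConfigMapping;
    import io.smallrye.config.WithName;

    import java.util.Optional;

    // Illustrative only: an Optional property is empty when the key is missing,
    // so callers pick the default, e.g. threadMultiplier().orElse( 4 ).
    @ConfigMapping( prefix = "pre-seed" )
    interface ExamplePreSeedConfig
    {
        @WithName( "thread-multiplier" )
        Optional<Integer> threadMultiplier();
    }
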
diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index b62e345..1b76c48 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -20,6 +20,7 @@
 import jakarta.annotation.PreDestroy;
 import jakarta.enterprise.context.ApplicationScoped;
 import jakarta.inject.Inject;
+import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.CookieStore;
 import org.apache.http.client.config.RequestConfig;
@@ -33,6 +34,7 @@
 import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.ArchiveStatus;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
+import org.commonjava.indy.service.archive.model.dto.HistoricalEntryDTO;
 import org.commonjava.indy.service.archive.util.HistoricalContentListReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -47,15 +49,24 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
@@ -64,6 +75,7 @@
 @ApplicationScoped
 public class ArchiveController
 {
+    private final Logger logger = LoggerFactory.getLogger( getClass() );
 
     public final static String EVENT_GENERATE_ARCHIVE = "generate-archive";
 
@@ -71,14 +83,23 @@ public class ArchiveController
 
     public final static String ARCHIVE_DIR = "/archive";
 
-    private final Logger logger = LoggerFactory.getLogger( getClass() );
-
     private final String ARCHIVE_SUFFIX = ".zip";
 
     private final String PART_SUFFIX = ".part";
 
     private final String PART_ARCHIVE_SUFFIX = PART_SUFFIX + ARCHIVE_SUFFIX;
 
+    private static final Set<String> CHECKSUMS = Collections.unmodifiableSet( new HashSet<String>()
+    {
+        {
+            add( ".sha1" );
+            add( ".sha256" );
+            add( ".md5" );
+        }
+    } );
+
+    private static final Map<String, Object> buildConfigLocks = new ConcurrentHashMap<>();
+
     @Inject
     HistoricalContentListReader reader;
 
@@ -88,7 +109,9 @@ public class ArchiveController
     @Inject
     ObjectMapper objectMapper;
 
-    private ExecutorService executorService;
+    private ExecutorService downloadExecutor;
+
+    private ExecutorService generateExecutor;
 
     private CloseableHttpClient client;
 
@@ -102,17 +125,22 @@ public class ArchiveController
     public void init()
             throws IOException
     {
-        int threads = 4 * Runtime.getRuntime().availableProcessors();
-        executorService = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
+        int threads = preSeedConfig.threadMultiplier().orElse( 4 ) * Runtime.getRuntime().availableProcessors();
+        downloadExecutor = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
             final Thread t = new Thread( r );
             t.setName( "Download-" + t.getName() );
             t.setDaemon( true );
             return t;
         } );
+        generateExecutor = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
+            final Thread t = new Thread( r );
+            t.setName( "Generate-" + t.getName() );
+            t.setDaemon( true );
+            return t;
+        } );
 
         final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager();
         ccm.setMaxTotal( 500 );
-
         RequestConfig rc = RequestConfig.custom().build();
         client = HttpClients.custom().setConnectionManager( ccm ).setDefaultRequestConfig( rc ).build();
@@ -130,27 +158,71 @@ public void destroy()
 
     public void generate( HistoricalContentDTO content )
     {
-        int threads = 4 * Runtime.getRuntime().availableProcessors();
-        ExecutorService generateExecutor = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
-            final Thread t = new Thread( r );
-            t.setName( "Generate-" + t.getName() );
-            t.setDaemon( true );
-            return t;
-        } );
-        generateExecutor.execute( () -> doGenerate( content ) );
+        String buildConfigId = content.getBuildConfigId();
+        Object lock = buildConfigLocks.computeIfAbsent( buildConfigId, k -> new Object() );
+        synchronized ( lock )
+        {
+            if ( isInProgress( buildConfigId ) )
+            {
+                logger.info( "There is already a generation process in progress for buildConfigId {}, skip this request.",
+                             buildConfigId );
+                // Conflicted generation, just return immediately
+                return;
+            }
+
+            recordInProgress( buildConfigId );
+            Future<?> future = generateExecutor.submit( () -> {
+                try
+                {
+                    doGenerate( content );
+                }
+                catch ( Exception e )
+                {
+                    // If an error happens during generation, remove the status so a later generation can proceed
+                    removeStatus( buildConfigId );
+                    logger.error( "Generation failed for buildConfigId {}", buildConfigId, e );
+                }
+                finally
+                {
+                    recordCompleted( buildConfigId );
+                    buildConfigLocks.remove( buildConfigId );
+                    logger.info( "lock released, buildConfigId {}", buildConfigId );
+                }
+            } );
+            try
+            {
+                future.get( 60, TimeUnit.MINUTES );
+            }
+            catch ( TimeoutException e )
+            {
+                // If generation times out, cancel it and remove the status so a later generation can proceed
+                future.cancel( true );
+                removeStatus( buildConfigId );
+                logger.error( "Generation timeout for buildConfigId {}", buildConfigId, e );
+            }
+            catch ( InterruptedException | ExecutionException e )
+            {
+                // If the future task itself fails, cancel it and remove the status so a later generation can proceed
+                future.cancel( true );
+                removeStatus( buildConfigId );
+                logger.error( "Generation future task failed for buildConfigId {}", buildConfigId, e );
+            }
+        }
     }
 
     protected Boolean doGenerate( HistoricalContentDTO content )
     {
         logger.info( "Handle generate event: {}, build config id: {}", EVENT_GENERATE_ARCHIVE,
                      content.getBuildConfigId() );
-        recordInProgress( content.getBuildConfigId() );
 
-        Map<String, String> downloadPaths = reader.readPaths( content );
+        Map<String, HistoricalEntryDTO> entryDTOs = reader.readEntries( content );
+        Map<String, String> downloadPaths = new HashMap<>();
+        entryDTOs.forEach( ( key, value ) -> downloadPaths.put( key, value.getPath() ) );
+
         Optional<File> archive;
         try
         {
-            downloadArtifacts( downloadPaths, content );
+            downloadArtifacts( entryDTOs, downloadPaths, content );
             archive = generateArchive( new ArrayList<>( downloadPaths.values() ), content );
         }
         catch ( final InterruptedException e )
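The computeIfAbsent/synchronized pairing above is a per-key single-flight guard: the first caller for a buildConfigId runs the generation, and a concurrent duplicate hits the isInProgress check and returns. Note that because generate() waits on future.get( 60, TimeUnit.MINUTES ) while holding the monitor, a second caller for the same id blocks until the first finishes rather than returning instantly. A minimal standalone sketch of the pattern (class and status names are illustrative, not from this change):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    // Per-key single-flight: concurrent callers for the same key serialize on one
    // lock object; the first does the work, later duplicates bail out early.
    class SingleFlight
    {
        private final Map<String, Object> locks = new ConcurrentHashMap<>();
        private final Map<String, String> status = new ConcurrentHashMap<>();

        void run( String key, Runnable work )
        {
            Object lock = locks.computeIfAbsent( key, k -> new Object() );
            synchronized ( lock )
            {
                if ( "in-progress".equals( status.get( key ) ) )
                {
                    return; // duplicate request, skip
                }
                status.put( key, "in-progress" );
                try
                {
                    work.run();
                    status.put( key, "completed" );
                }
                finally
                {
                    locks.remove( key ); // next request gets a fresh lock
                }
            }
        }
    }
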
@@ -177,7 +249,6 @@ protected Boolean doGenerate( HistoricalContentDTO content )
             created = renderArchive( archive.get(), content.getBuildConfigId() );
         }
 
-        recordCompleted( content.getBuildConfigId() );
         return created;
     }
 
@@ -203,6 +274,36 @@ public void deleteArchive( final String buildConfigId )
         logger.info( "Historical archive for build config id: {} is deleted.", buildConfigId );
     }
 
+    public void deleteArchiveWithChecksum( final String buildConfigId, final String checksum )
+            throws IOException
+    {
+        logger.info( "Start to delete archive with checksum validation, buildConfigId {}, checksum {}", buildConfigId,
+                     checksum );
+        File zip = new File( archiveDir, buildConfigId + ARCHIVE_SUFFIX );
+        if ( !zip.exists() )
+        {
+            return;
+        }
+
+        try (FileInputStream fis = new FileInputStream( zip ))
+        {
+            String storedChecksum = DigestUtils.sha256Hex( fis );
+
+            // only delete the zip once checksum is matched
+            if ( storedChecksum.equals( checksum ) )
+            {
+                zip.delete();
+                logger.info( "Historical archive for build config id: {} is deleted, checksum {}.", buildConfigId,
+                             storedChecksum );
+            }
+            else
+            {
+                logger.info( "Don't delete the {} zip, transferred checksum {}, but stored checksum {}.",
+                             buildConfigId, checksum, storedChecksum );
+            }
+        }
+    }
+
     public void cleanup()
             throws IOException
     {
@@ -221,28 +322,46 @@ public boolean statusExists( final String buildConfigId )
         return treated.containsKey( buildConfigId );
     }
 
-    public String getStatus( String buildConfigId )
+    public String getStatus( final String buildConfigId )
     {
         return treated.get( buildConfigId );
     }
 
-    private void downloadArtifacts( final Map<String, String> downloadPaths, final HistoricalContentDTO content )
+    public boolean isInProgress( final String buildConfigId )
+    {
+        return statusExists( buildConfigId ) && getStatus( buildConfigId ).equals(
+                ArchiveStatus.inProgress.getArchiveStatus() );
+    }
+
+    private void downloadArtifacts( final Map<String, HistoricalEntryDTO> entryDTOs,
+                                    final Map<String, String> downloadPaths, final HistoricalContentDTO content )
             throws InterruptedException, ExecutionException, IOException
     {
         BasicCookieStore cookieStore = new BasicCookieStore();
-        ExecutorCompletionService<Boolean> executor = new ExecutorCompletionService<>( executorService );
+        ExecutorCompletionService<Boolean> executor = new ExecutorCompletionService<>( downloadExecutor );
 
         String contentBuildDir = String.format( "%s/%s", contentDir, content.getBuildConfigId() );
         File dir = new File( contentBuildDir );
         dir.delete();
 
-        fileTrackedContent( contentBuildDir, content );
-        unpackHistoricalArchive( contentBuildDir, content.getBuildConfigId() );
+        HistoricalContentDTO originalTracked = unpackHistoricalArchive( contentBuildDir, content.getBuildConfigId() );
+        Map<String, List<String>> originalChecksumsMap = new HashMap<>();
+        if ( originalTracked != null )
+        {
+            logger.trace( "originalChecksumsMap generated for {}", content.getBuildConfigId() );
+            Map<String, HistoricalEntryDTO> originalEntries = reader.readEntries( originalTracked );
+            originalEntries.forEach( ( key, entry ) -> originalChecksumsMap.put( key, new ArrayList<>(
+                    Arrays.asList( entry.getSha1(), entry.getSha256(), entry.getMd5() ) ) ) );
+        }
 
         for ( String path : downloadPaths.keySet() )
         {
             String filePath = downloadPaths.get( path );
-            executor.submit( download( contentBuildDir, path, filePath, cookieStore ) );
+            HistoricalEntryDTO entry = entryDTOs.get( path );
+            List<String> checksums =
+                    new ArrayList<>( Arrays.asList( entry.getSha1(), entry.getSha256(), entry.getMd5() ) );
+            List<String> originalChecksums = originalChecksumsMap.get( path );
+            executor.submit( download( contentBuildDir, path, filePath, checksums, originalChecksums, cookieStore ) );
         }
         int success = 0;
         int failed = 0;
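deleteArchiveWithChecksum (added above) only removes the stored zip when the caller-supplied digest matches the file's actual SHA-256. The same verify-then-delete flow, reduced to a standalone sketch using the commons-codec API this change already imports (class and method names are illustrative):

    import org.apache.commons.codec.digest.DigestUtils;

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;

    class ChecksumGuardedDelete
    {
        // Deletes target only when its SHA-256 hex digest equals expectedSha256.
        static boolean deleteIfMatches( File target, String expectedSha256 ) throws IOException
        {
            if ( !target.exists() )
            {
                return false;
            }
            try (FileInputStream fis = new FileInputStream( target ))
            {
                String actual = DigestUtils.sha256Hex( fis );
                return actual.equals( expectedSha256 ) && target.delete();
            }
        }
    }
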
@@ -257,6 +376,8 @@ private void downloadArtifacts( final Map<String, String> downloadPaths, final H
                 failed++;
             }
         }
+        // write the latest tracked json at the end
+        fileTrackedContent( contentBuildDir, content );
         logger.info( "Artifacts download completed, success:{}, failed:{}", success, failed );
     }
 
@@ -369,14 +490,14 @@ private void fileTrackedContent( String contentBuildDir, final HistoricalContent
         }
     }
 
-    private void unpackHistoricalArchive( String contentBuildDir, String buildConfigId )
+    private HistoricalContentDTO unpackHistoricalArchive( String contentBuildDir, String buildConfigId )
             throws IOException
    {
         final File archive = new File( archiveDir, buildConfigId + ARCHIVE_SUFFIX );
         if ( !archive.exists() )
         {
             logger.debug( "Don't find historical archive for buildConfigId: {}.", buildConfigId );
-            return;
+            return null;
         }
 
         logger.info( "Start unpacking historical archive for buildConfigId: {}.", buildConfigId );
@@ -384,6 +505,7 @@ private void unpackHistoricalArchive( String contentBuildDir, String buildConfig
         ZipEntry entry;
         while ( ( entry = inputStream.getNextEntry() ) != null )
         {
+            logger.trace( "entry path:" + entry.getName() );
             File outputFile = new File( contentBuildDir, entry.getName() );
             outputFile.getParentFile().mkdirs();
             try ( FileOutputStream outputStream = new FileOutputStream( outputFile ) )
@@ -393,16 +515,56 @@ private void unpackHistoricalArchive( String contentBuildDir, String buildConfig
         }
 
         inputStream.close();
+
+        File originalTracked = new File( contentBuildDir, buildConfigId );
+        if ( originalTracked.exists() )
+        {
+            return objectMapper.readValue( originalTracked, HistoricalContentDTO.class );
+        }
+        else
+        {
+            logger.debug( "No tracked json file found after zip unpack for buildConfigId {}", buildConfigId );
+            return null;
+        }
+    }
+
+    private boolean validateChecksum( final String filePath, final List<String> current, final List<String> original )
+    {
+        if ( CHECKSUMS.stream().anyMatch( suffix -> filePath.toLowerCase().endsWith( suffix ) ) )
+        {
+            // skip validation for checksum files themselves
+            return false;
+        }
+        if ( original == null || original.isEmpty() || original.stream().allMatch( Objects::isNull ) )
+        {
+            return false;
+        }
+        // once sha1 is matched, skip downloading
+        if ( original.get( 0 ) != null && original.get( 0 ).equals( current.get( 0 ) ) )
+        {
+            return true;
+        }
+        // once sha256 is matched, skip downloading
+        if ( original.get( 1 ) != null && original.get( 1 ).equals( current.get( 1 ) ) )
+        {
+            return true;
+        }
+        // once md5 is matched, skip downloading
+        return original.get( 2 ) != null && original.get( 2 ).equals( current.get( 2 ) );
     }
 
-    private Callable<Boolean> download( String contentBuildDir, final String path, final String filePath,
+    private Callable<Boolean> download( final String contentBuildDir, final String path, final String filePath,
+                                        final List<String> checksums, final List<String> originalChecksums,
                                         final CookieStore cookieStore )
     {
         return () -> {
             final File target = new File( contentBuildDir, filePath );
-            if ( target.exists() )
+
+            if ( target.exists() && validateChecksum( filePath, checksums, originalChecksums ) )
             {
-                logger.trace( "<<<Already existed, skip downloading, path: {}", path );
+                logger.trace( "<<<Already existed with matched checksum, skip downloading, path: {}", path );
                 return true;
             }
             final HttpClientContext context = HttpClientContext.create();
             context.setCookieStore( cookieStore );
             final HttpGet request = new HttpGet( path );
             InputStream input = null;
+            if ( target.exists() )
+            {
+                // remove an obsolete file left behind by an earlier http error
+                target.delete();
+            }
             try
             {
                 CloseableHttpResponse response = client.execute( request, context );
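validateChecksum (above) stores the three digests positionally as [sha1, sha256, md5] and treats any non-null pairwise match as proof that the locally unpacked file is still current, so the download can be skipped. A compact standalone equivalent with made-up sample values:

    import java.util.Arrays;
    import java.util.List;
    import java.util.Objects;

    class ChecksumMatch
    {
        // current/original each hold [sha1, sha256, md5]; any non-null pairwise
        // match means the locally unpacked file can be reused.
        static boolean anyDigestMatches( List<String> current, List<String> original )
        {
            if ( original == null || original.stream().allMatch( Objects::isNull ) )
            {
                return false;
            }
            for ( int i = 0; i < 3; i++ )
            {
                if ( original.get( i ) != null && original.get( i ).equals( current.get( i ) ) )
                {
                    return true;
                }
            }
            return false;
        }

        public static void main( String[] args )
        {
            List<String> current = Arrays.asList( "abc1", null, "ffff" );
            List<String> original = Arrays.asList( "abc1", "9999", null );
            System.out.println( anyDigestMatches( current, original ) ); // true, via sha1
        }
    }
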
@@ -465,27 +632,27 @@ private void restoreGenerateStatusFromDisk() throws IOException
         List<File> contents = walkAllFiles( archiveDir );
         for ( File content : contents )
         {
-            if ( content.getName().endsWith( PART_ARCHIVE_SUFFIX ) )
-            {
-                treated.put( content.getName().split( PART_ARCHIVE_SUFFIX )[0],
-                             ArchiveStatus.inProgress.getArchiveStatus() );
-            }
-            else if ( content.getName().endsWith( ARCHIVE_SUFFIX ) )
+            if ( content.getName().endsWith( ARCHIVE_SUFFIX ) )
             {
                 treated.put( content.getName().split( ARCHIVE_SUFFIX )[0],
                              ArchiveStatus.completed.getArchiveStatus() );
             }
         }
     }
 
-    private void recordInProgress( String buildConfigId )
+    private void recordInProgress( final String buildConfigId )
     {
         treated.remove( buildConfigId );
         treated.put( buildConfigId, ArchiveStatus.inProgress.getArchiveStatus() );
     }
 
-    private void recordCompleted( String buildConfigId )
+    private void recordCompleted( final String buildConfigId )
     {
         treated.remove( buildConfigId );
         treated.put( buildConfigId, ArchiveStatus.completed.getArchiveStatus() );
     }
+
+    private void removeStatus( final String buildConfigId )
+    {
+        treated.remove( buildConfigId );
+    }
 }
diff --git a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
index 6b0a383..96a8c60 100644
--- a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
+++ b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
@@ -18,6 +18,19 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.smallrye.mutiny.Uni;
 import io.vertx.core.eventbus.EventBus;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.ResponseBuilder;
+import jakarta.ws.rs.core.UriInfo;
 import org.apache.commons.io.FileUtils;
 import org.commonjava.indy.service.archive.controller.ArchiveController;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
@@ -31,19 +44,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import jakarta.ws.rs.PathParam;
-import jakarta.inject.Inject;
-import jakarta.ws.rs.Consumes;
-import jakarta.ws.rs.DELETE;
-import jakarta.ws.rs.GET;
-import jakarta.ws.rs.POST;
-import jakarta.ws.rs.Path;
-import jakarta.ws.rs.Produces;
-import jakarta.ws.rs.core.Context;
-import jakarta.ws.rs.core.MediaType;
-import jakarta.ws.rs.core.Response;
-import jakarta.ws.rs.core.Response.ResponseBuilder;
-import jakarta.ws.rs.core.UriInfo;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
\"XXX\"," + "\"downloads\":" + "[{" + " \"storeKey\": \"\"," - + " \"path\": \"\"," + " \"md5\": \"\"," + " \"sha256\": \"\"," - + " \"sha1\": \"\"," + " \"size\": 001" + " }," + "..." - + "]}", schema = @Schema( implementation = HistoricalContentDTO.class ) ) ) + @RequestBody( description = "The tracked content definition JSON", name = "body", required = true, + content = @Content( mediaType = APPLICATION_JSON, + example = "{" + "\"buildConfigId\": \"XXX\"," + "\"downloads\":" + "[{" + + " \"storeKey\": \"\"," + " \"path\": \"\"," + " \"md5\": \"\"," + + " \"sha256\": \"\"," + " \"sha1\": \"\"," + " \"size\": 001" + + " }," + "..." + "]}", + schema = @Schema( implementation = HistoricalContentDTO.class ) ) ) @POST @Path( "generate" ) @Consumes( APPLICATION_JSON ) @@ -187,6 +188,27 @@ public Uni delete( final @PathParam( "buildConfigId" ) String buildCon return Uni.createFrom().item( noContent().build() ); } + @Operation( description = "Delete the archive by buildConfigId and checksum" ) + @APIResponse( responseCode = "204", description = "The history archive is deleted or doesn't exist" ) + @Path( "{buildConfigId}/{checksum}" ) + @DELETE + public Uni deleteWithChecksum( final @PathParam( "buildConfigId" ) String buildConfigId, + final @PathParam( "checksum" ) String checksum, + final @Context UriInfo uriInfo ) + { + try + { + controller.deleteArchiveWithChecksum( buildConfigId, checksum ); + } + catch ( final IOException e ) + { + final String message = "Failed to delete historical archive for build config id: " + buildConfigId; + logger.error( message, e ); + return fromResponse( message ); + } + return Uni.createFrom().item( noContent().build() ); + } + @Operation( description = "Clean up all the temp workplace" ) @APIResponse( responseCode = "204", description = "The workplace cleanup is finished" ) @Path( "cleanup" ) diff --git a/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java b/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java index c6cff17..2356aea 100644 --- a/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java +++ b/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java @@ -15,12 +15,12 @@ */ package org.commonjava.indy.service.archive.util; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; import org.commonjava.indy.service.archive.config.PreSeedConfig; import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO; import org.commonjava.indy.service.archive.model.dto.HistoricalEntryDTO; -import jakarta.enterprise.context.ApplicationScoped; -import jakarta.inject.Inject; import java.util.HashMap; import java.util.Map; @@ -43,41 +43,42 @@ public HistoricalContentListReader( PreSeedConfig preSeedConfig ) this.preSeedConfig = preSeedConfig; } - public Map readPaths( HistoricalContentDTO content ) + public Map readEntries( HistoricalContentDTO content ) { - Map pathMap = new HashMap<>(); + Map pathMap = new HashMap<>(); HistoricalEntryDTO[] downloads = content.getDownloads(); - if ( downloads != null ) + if ( downloads == null ) { - for ( HistoricalEntryDTO download : downloads ) - { - String path = download.getPath(); - String packageType = download.getStoreKey().getPackageType(); + return pathMap; + } + for ( HistoricalEntryDTO download : downloads ) + { + String path = download.getPath(); + String packageType = download.getStoreKey().getPackageType(); - if ( packageType.equals( NPM_PKG_KEY ) 
&& !path.endsWith( ".tgz" ) ) - { - // Ignore the npm package metadata in archive - continue; - } - if ( path.contains( "maven-metadata.xml" ) ) - { - // Ignore maven-metadata.xml in archive - continue; - } - // ensure every entry has an available localUrl - buildDownloadUrl( download ); + if ( packageType.equals( NPM_PKG_KEY ) && !path.endsWith( ".tgz" ) ) + { + // Ignore the npm package metadata in archive + continue; + } + if ( path.contains( "maven-metadata.xml" ) ) + { + // Ignore maven-metadata.xml in archive + continue; + } + // ensure every entry has an available localUrl + buildDownloadUrl( download ); - // local url would be preferred to download artifact - String url = download.getLocalUrl(); - if ( url == null ) - { - url = download.getOriginUrl(); - } - if ( url != null ) - { - pathMap.put( url, download.getPath() ); - } + // local url would be preferred to download artifact + String url = download.getLocalUrl(); + if ( url == null ) + { + url = download.getOriginUrl(); + } + if ( url != null ) + { + pathMap.put( url, download ); } } return pathMap; diff --git a/src/main/resources/application.yaml b/src/main/resources/application.yaml index b82efe5..ac6f007 100644 --- a/src/main/resources/application.yaml +++ b/src/main/resources/application.yaml @@ -46,4 +46,5 @@ quarkus: pre-seed: main-indy: https://indy-gateway-master-devel.psi.redhat.com storage-dir: data - not-used-days-cleanup: 10 \ No newline at end of file + not-used-days-cleanup: 10 + thread-multiplier: 4 \ No newline at end of file diff --git a/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java b/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java index b7743da..2e76fe3 100644 --- a/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java +++ b/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java @@ -16,7 +16,6 @@ package org.commonjava.indy.service.archive.util; import io.quarkus.test.junit.QuarkusTest; -import org.commonjava.indy.service.archive.config.PreSeedConfig; import org.commonjava.indy.service.archive.model.StoreKey; import org.commonjava.indy.service.archive.model.StoreType; import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO; @@ -24,6 +23,7 @@ import org.junit.jupiter.api.Test; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -58,13 +58,15 @@ public void testMavenReadPaths() entryDTOs.add( entry ); entryDTOs.add( metaEntry ); - HistoricalContentDTO contentDTO = new HistoricalContentDTO( "8888", entryDTOs.toArray( - new HistoricalEntryDTO[entryDTOs.size()] ) ); + HistoricalContentDTO contentDTO = + new HistoricalContentDTO( "8888", entryDTOs.toArray( new HistoricalEntryDTO[entryDTOs.size()] ) ); TestPreSeedConfig preSeedConfig = new TestPreSeedConfig( Optional.of( MAIN_INDY ) ); HistoricalContentListReader reader = new HistoricalContentListReader( preSeedConfig ); - Map paths = reader.readPaths( contentDTO ); + Map entryMaps = reader.readEntries( contentDTO ); + Map paths = new HashMap<>(); + entryMaps.forEach( ( key, value ) -> paths.put( key, value.getPath() ) ); assertThat( paths.size(), equalTo( 1 ) ); String storePath = MAIN_INDY + "/api/content" + entry.getStorePath(); @@ -86,13 +88,15 @@ public void testNPMReadPaths() entryDTOs.add( npmEntry ); entryDTOs.add( npmMetaEntry ); - HistoricalContentDTO contentDTO = new HistoricalContentDTO( "8888", 
diff --git a/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java b/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java
index b7743da..2e76fe3 100644
--- a/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java
+++ b/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java
@@ -16,7 +16,6 @@
 package org.commonjava.indy.service.archive.util;
 
 import io.quarkus.test.junit.QuarkusTest;
-import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.StoreKey;
 import org.commonjava.indy.service.archive.model.StoreType;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
@@ -24,6 +23,7 @@
 import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -58,13 +58,15 @@ public void testMavenReadPaths()
         entryDTOs.add( entry );
         entryDTOs.add( metaEntry );
 
-        HistoricalContentDTO contentDTO = new HistoricalContentDTO( "8888", entryDTOs.toArray(
-                        new HistoricalEntryDTO[entryDTOs.size()] ) );
+        HistoricalContentDTO contentDTO =
+                new HistoricalContentDTO( "8888", entryDTOs.toArray( new HistoricalEntryDTO[entryDTOs.size()] ) );
 
         TestPreSeedConfig preSeedConfig = new TestPreSeedConfig( Optional.of( MAIN_INDY ) );
         HistoricalContentListReader reader = new HistoricalContentListReader( preSeedConfig );
-        Map<String, String> paths = reader.readPaths( contentDTO );
+        Map<String, HistoricalEntryDTO> entryMaps = reader.readEntries( contentDTO );
+        Map<String, String> paths = new HashMap<>();
+        entryMaps.forEach( ( key, value ) -> paths.put( key, value.getPath() ) );
 
         assertThat( paths.size(), equalTo( 1 ) );
         String storePath = MAIN_INDY + "/api/content" + entry.getStorePath();
@@ -86,13 +88,15 @@ public void testNPMReadPaths()
         entryDTOs.add( npmEntry );
         entryDTOs.add( npmMetaEntry );
 
-        HistoricalContentDTO contentDTO = new HistoricalContentDTO( "8888", entryDTOs.toArray(
-                        new HistoricalEntryDTO[entryDTOs.size()] ) );
+        HistoricalContentDTO contentDTO =
+                new HistoricalContentDTO( "8888", entryDTOs.toArray( new HistoricalEntryDTO[entryDTOs.size()] ) );
 
         TestPreSeedConfig preSeedConfig = new TestPreSeedConfig( Optional.of( MAIN_INDY ) );
         HistoricalContentListReader reader = new HistoricalContentListReader( preSeedConfig );
-        Map<String, String> paths = reader.readPaths( contentDTO );
+        Map<String, HistoricalEntryDTO> entryMaps = reader.readEntries( contentDTO );
+        Map<String, String> paths = new HashMap<>();
+        entryMaps.forEach( ( key, value ) -> paths.put( key, value.getPath() ) );
 
         assertThat( paths.size(), equalTo( 1 ) );
         String storePath = MAIN_INDY + "/api/content" + npmEntry.getStorePath();
diff --git a/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java b/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java
index a9d8a5b..8bab7d4 100644
--- a/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java
+++ b/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java
@@ -46,4 +46,10 @@ public Optional<Long> notUsedDaysCleanup()
         return Optional.of( 365l );
     }
 
+    @Override
+    public Optional<Integer> threadMultiplier()
+    {
+        return Optional.of( 4 );
+    }
+
 }
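With the test override above, pool sizing in ArchiveController.init() resolves to thread-multiplier × available processors, defaulting the multiplier to 4 when the property is unset; the arithmetic, as a one-file sketch:

    import java.util.Optional;

    class PoolSizing
    {
        // Same computation as ArchiveController.init().
        static int threads( Optional<Integer> multiplier )
        {
            return multiplier.orElse( 4 ) * Runtime.getRuntime().availableProcessors();
        }

        public static void main( String[] args )
        {
            System.out.println( threads( Optional.of( 4 ) ) );  // e.g. 32 on an 8-core host
            System.out.println( threads( Optional.empty() ) );  // same, via the default
        }
    }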