From 1ffe209958907452d907ce33bd272586ca081835 Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Mon, 2 Dec 2024 11:45:48 +0800
Subject: [PATCH 1/8] Fix checksum validation for archive generation download

---
 .../archive/controller/ArchiveController.java | 90 ++++++++++++++++---
 .../util/HistoricalContentListReader.java     | 63 ++++++-------
 .../util/HistoricalContentListReaderTest.java | 18 ++--
 3 files changed, 122 insertions(+), 49 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index b62e345..09a6838 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -33,6 +33,7 @@
 import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.ArchiveStatus;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
+import org.commonjava.indy.service.archive.model.dto.HistoricalEntryDTO;
 import org.commonjava.indy.service.archive.util.HistoricalContentListReader;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -47,10 +48,15 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
+import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorCompletionService;
@@ -79,6 +85,15 @@ public class ArchiveController
 
     private final String PART_ARCHIVE_SUFFIX = PART_SUFFIX + ARCHIVE_SUFFIX;
 
+    private static final Set<String> CHECKSUMS = Collections.unmodifiableSet( new HashSet<String>()
+    {
+        {
+            add( ".md5" );
+            add( ".sha1" );
+            add( ".sha256" );
+        }
+    } );
+
     @Inject
     HistoricalContentListReader reader;
 
@@ -146,11 +161,14 @@ protected Boolean doGenerate( HistoricalContentDTO content )
                      content.getBuildConfigId() );
         recordInProgress( content.getBuildConfigId() );
 
-        Map<String, String> downloadPaths = reader.readPaths( content );
+        Map<String, HistoricalEntryDTO> entryDTOs = reader.readEntries( content );
+        Map<String, String> downloadPaths = new HashMap<>();
+        entryDTOs.forEach( ( key, value ) -> downloadPaths.put( key, value.getPath() ) );
+
         Optional<File> archive;
         try
         {
-            downloadArtifacts( downloadPaths, content );
+            downloadArtifacts( entryDTOs, downloadPaths, content );
             archive = generateArchive( new ArrayList<>( downloadPaths.values() ), content );
         }
         catch ( final InterruptedException e )
@@ -226,7 +244,8 @@ public String getStatus( String buildConfigId )
         return treated.get( buildConfigId );
     }
 
-    private void downloadArtifacts( final Map<String, String> downloadPaths, final HistoricalContentDTO content )
+    private void downloadArtifacts( final Map<String, HistoricalEntryDTO> entryDTOs,
+                                    final Map<String, String> downloadPaths, final HistoricalContentDTO content )
             throws InterruptedException, ExecutionException, IOException
     {
         BasicCookieStore cookieStore = new BasicCookieStore();
@@ -236,13 +255,23 @@ private void downloadArtifacts( final Map<String, String> downloadPaths, final H
         File dir = new File( contentBuildDir );
         dir.delete();
 
-        fileTrackedContent( contentBuildDir, content );
-        unpackHistoricalArchive( contentBuildDir, content.getBuildConfigId() );
+        HistoricalContentDTO originalTracked = unpackHistoricalArchive( contentBuildDir, content.getBuildConfigId() );
+        Map<String, List<String>> originalChecksumsMap = new HashMap<>();
+        if ( originalTracked != null )
+        {
+            Map<String, HistoricalEntryDTO> originalEntries = reader.readEntries( originalTracked );
+            originalEntries.forEach( ( key, entry ) -> originalChecksumsMap.put( key, new ArrayList<>(
+                    Arrays.asList( entry.getSha1(), entry.getSha256(), entry.getMd5() ) ) ) );
+        }
 
         for ( String path : downloadPaths.keySet() )
         {
             String filePath = downloadPaths.get( path );
-            executor.submit( download( contentBuildDir, path, filePath, cookieStore ) );
+            HistoricalEntryDTO entry = entryDTOs.get( path );
+            List<String> checksums =
+                    new ArrayList<>( Arrays.asList( entry.getSha1(), entry.getSha256(), entry.getMd5() ) );
+            List<String> originalChecksums = originalChecksumsMap.get( path );
+            executor.submit( download( contentBuildDir, path, filePath, checksums, originalChecksums, cookieStore ) );
         }
         int success = 0;
         int failed = 0;
@@ -257,6 +286,8 @@ private void downloadArtifacts( final Map<String, String> downloadPaths, final H
                 failed++;
             }
         }
+        // store the latest tracked json at the end
+        fileTrackedContent( contentBuildDir, content );
         logger.info( "Artifacts download completed, success:{}, failed:{}", success, failed );
     }
 
@@ -369,14 +400,14 @@ private void fileTrackedContent( String contentBuildDir, final HistoricalContent
         }
     }
 
-    private void unpackHistoricalArchive( String contentBuildDir, String buildConfigId )
+    private HistoricalContentDTO unpackHistoricalArchive( String contentBuildDir, String buildConfigId )
             throws IOException
     {
         final File archive = new File( archiveDir, buildConfigId + ARCHIVE_SUFFIX );
         if ( !archive.exists() )
         {
             logger.debug( "Don't find historical archive for buildConfigId: {}.", buildConfigId );
-            return;
+            return null;
         }
 
         logger.info( "Start unpacking historical archive for buildConfigId: {}.", buildConfigId );
@@ -393,16 +424,53 @@ private void unpackHistoricalArchive( String contentBuildDir, String buildConfig
 
         }
         inputStream.close();
+
+        File originalTracked = new File( contentBuildDir, buildConfigId );
+        if ( originalTracked.exists() )
+        {
+            return objectMapper.readValue( originalTracked, HistoricalContentDTO.class );
+        }
+        else
+        {
+            logger.debug( "No tracked json file found after zip unpack for buildConfigId {}", buildConfigId );
+            return null;
+        }
     }
 
-    private Callable<Boolean> download( String contentBuildDir, final String path, final String filePath,
+    private boolean validateChecksum( final String filePath, final List<String> current, final List<String> original )
+    {
+        if ( CHECKSUMS.stream().anyMatch( suffix -> filePath.toLowerCase().endsWith( suffix ) ) )
+        {
+            // skip checksum validation for checksum files themselves (.md5/.sha1/.sha256)
+            return false;
+        }
+        if ( original == null || original.isEmpty() || original.stream().allMatch( Objects::isNull ) )
+        {
+            return false;
+        }
+        if ( original.get( 0 ) != null && original.get( 0 ).equals( current.get( 0 ) ) )
+        {
+            return true;
+        }
+        if ( original.get( 1 ) != null && original.get( 1 ).equals( current.get( 1 ) ) )
+        {
+            return true;
+        }
+        return original.get( 2 ) != null && original.get( 2 ).equals( current.get( 2 ) );
+    }
+
+    private Callable<Boolean> download( final String contentBuildDir, final String path, final String filePath,
+                                        final List<String> checksums, final List<String> originalChecksums,
                                         final CookieStore cookieStore )
     {
         return () -> {
             final File target = new File( contentBuildDir, filePath );
-            if ( target.exists() )
+
+            if ( target.exists() && validateChecksum( filePath, checksums, originalChecksums ) )
             {
-                logger.trace( "<<<Already existed in historical archive, skip downloading, path: {}.", path );
+                logger.debug(
+                        "<<<Already existed in historical archive, and checksum equals, skip downloading, path: {}.",
+                        path );
                 return true;
             }
             final File dir = target.getParentFile();
diff --git a/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java b/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java
index c6cff17..2356aea 100644
--- a/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java
+++ b/src/main/java/org/commonjava/indy/service/archive/util/HistoricalContentListReader.java
@@ -15,12 +15,12 @@
  */
 package org.commonjava.indy.service.archive.util;
 
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.inject.Inject;
 import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
 import org.commonjava.indy.service.archive.model.dto.HistoricalEntryDTO;
 
-import jakarta.enterprise.context.ApplicationScoped;
-import jakarta.inject.Inject;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -43,41 +43,42 @@ public HistoricalContentListReader( PreSeedConfig preSeedConfig )
         this.preSeedConfig = preSeedConfig;
     }
 
-    public Map<String, String> readPaths( HistoricalContentDTO content  )
+    public Map<String, HistoricalEntryDTO> readEntries( HistoricalContentDTO content )
     {
-        Map<String, String> pathMap = new HashMap<>();
+        Map<String, HistoricalEntryDTO> pathMap = new HashMap<>();
         HistoricalEntryDTO[] downloads = content.getDownloads();
 
-        if ( downloads != null )
+        if ( downloads == null )
         {
-            for ( HistoricalEntryDTO download : downloads )
-            {
-                String path = download.getPath();
-                String packageType = download.getStoreKey().getPackageType();
+            return pathMap;
+        }
+        for ( HistoricalEntryDTO download : downloads )
+        {
+            String path = download.getPath();
+            String packageType = download.getStoreKey().getPackageType();
 
-                if ( packageType.equals( NPM_PKG_KEY ) && !path.endsWith( ".tgz" ) )
-                {
-                    // Ignore the npm package metadata in archive
-                    continue;
-                }
-                if ( path.contains( "maven-metadata.xml" ) )
-                {
-                    // Ignore maven-metadata.xml in archive
-                    continue;
-                }
-                // ensure every entry has an available localUrl
-                buildDownloadUrl( download );
+            if ( packageType.equals( NPM_PKG_KEY ) && !path.endsWith( ".tgz" ) )
+            {
+                // Ignore the npm package metadata in archive
+                continue;
+            }
+            if ( path.contains( "maven-metadata.xml" ) )
+            {
+                // Ignore maven-metadata.xml in archive
+                continue;
+            }
+            // ensure every entry has an available localUrl
+            buildDownloadUrl( download );
 
-                // local url would be preferred to download artifact
-                String url = download.getLocalUrl();
-                if ( url == null )
-                {
-                    url = download.getOriginUrl();
-                }
-                if ( url != null )
-                {
-                    pathMap.put( url, download.getPath() );
-                }
+            // local url would be preferred to download artifact
+            String url = download.getLocalUrl();
+            if ( url == null )
+            {
+                url = download.getOriginUrl();
+            }
+            if ( url != null )
+            {
+                pathMap.put( url, download );
             }
         }
         return pathMap;
diff --git a/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java b/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java
index b7743da..2e76fe3 100644
--- a/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java
+++ b/src/test/java/org/commonjava/indy/service/archive/util/HistoricalContentListReaderTest.java
@@ -16,7 +16,6 @@
 package org.commonjava.indy.service.archive.util;
 
 import io.quarkus.test.junit.QuarkusTest;
-import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.StoreKey;
 import org.commonjava.indy.service.archive.model.StoreType;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
@@ -24,6 +23,7 @@
 import org.junit.jupiter.api.Test;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
@@ -58,13 +58,15 @@ public void testMavenReadPaths()
         entryDTOs.add( entry );
         entryDTOs.add( metaEntry );
 
-        HistoricalContentDTO contentDTO = new HistoricalContentDTO( "8888", entryDTOs.toArray(
-                        new HistoricalEntryDTO[entryDTOs.size()] ) );
+        HistoricalContentDTO contentDTO =
+                new HistoricalContentDTO( "8888", entryDTOs.toArray( new HistoricalEntryDTO[entryDTOs.size()] ) );
 
         TestPreSeedConfig preSeedConfig = new TestPreSeedConfig( Optional.of( MAIN_INDY ) );
         HistoricalContentListReader reader = new HistoricalContentListReader( preSeedConfig );
 
-        Map<String, String> paths = reader.readPaths( contentDTO );
+        Map<String, HistoricalEntryDTO> entryMaps = reader.readEntries( contentDTO );
+        Map<String, String> paths = new HashMap<>();
+        entryMaps.forEach( ( key, value ) -> paths.put( key, value.getPath() ) );
 
         assertThat( paths.size(), equalTo( 1 ) );
         String storePath = MAIN_INDY + "/api/content" + entry.getStorePath();
@@ -86,13 +88,15 @@ public void testNPMReadPaths()
         entryDTOs.add( npmEntry );
         entryDTOs.add( npmMetaEntry );
 
-        HistoricalContentDTO contentDTO = new HistoricalContentDTO( "8888", entryDTOs.toArray(
-                        new HistoricalEntryDTO[entryDTOs.size()] ) );
+        HistoricalContentDTO contentDTO =
+                new HistoricalContentDTO( "8888", entryDTOs.toArray( new HistoricalEntryDTO[entryDTOs.size()] ) );
 
         TestPreSeedConfig preSeedConfig = new TestPreSeedConfig( Optional.of( MAIN_INDY ) );
         HistoricalContentListReader reader = new HistoricalContentListReader( preSeedConfig );
 
-        Map<String, String> paths = reader.readPaths( contentDTO );
+        Map<String, HistoricalEntryDTO> entryMaps = reader.readEntries( contentDTO );
+        Map<String, String> paths = new HashMap<>();
+        entryMaps.forEach( ( key, value ) -> paths.put( key, value.getPath() ) );
 
         assertThat( paths.size(), equalTo( 1 ) );
         String storePath = MAIN_INDY + "/api/content" + npmEntry.getStorePath();

From fb9068e3c8d05775e46ebf19baacfb879ea5b56c Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Mon, 2 Dec 2024 14:04:17 +0800
Subject: [PATCH 2/8] Fix archive generation process with synchronized lock

---
 .../archive/controller/ArchiveController.java | 85 ++++++++++++++-----
 .../archive/jaxrs/ArchiveManageResources.java | 39 ++++-----
 2 files changed, 82 insertions(+), 42 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index 09a6838..2560f2e 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -29,7 +29,6 @@
 import org.apache.http.impl.client.BasicCookieStore;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
-import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.ArchiveStatus;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
@@ -47,6 +46,9 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.security.KeyManagementException;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -58,6 +60,7 @@
 import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
@@ -94,6 +97,18 @@ public class ArchiveController
         }
     } );
 
+    private static final Map<String, Object> buildConfigLocks = new ConcurrentHashMap<>();
+
+    private static final int threads = 4 * Runtime.getRuntime().availableProcessors();
+
+    private static final ExecutorService generateExecutor =
+            Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
+                final Thread t = new Thread( r );
+                t.setName( "Generate-" + t.getName() );
+                t.setDaemon( true );
+                return t;
+            } );
+
     @Inject
     HistoricalContentListReader reader;
 
@@ -115,7 +130,7 @@ public class ArchiveController
 
     @PostConstruct
     public void init()
-            throws IOException
+            throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException
     {
         int threads = 4 * Runtime.getRuntime().availableProcessors();
         executorService = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
@@ -125,11 +140,8 @@ public void init()
             return t;
         } );
 
-        final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager();
-        ccm.setMaxTotal( 500 );
-
         RequestConfig rc = RequestConfig.custom().build();
-        client = HttpClients.custom().setConnectionManager( ccm ).setDefaultRequestConfig( rc ).build();
+        client = HttpClients.custom().setDefaultRequestConfig( rc ).build();
 
         String storeDir = preSeedConfig.storageDir().orElse( "data" );
         contentDir = String.format( "%s%s", storeDir, CONTENT_DIR );
@@ -145,21 +157,48 @@ public void destroy()
 
     public void generate( HistoricalContentDTO content )
     {
-        int threads = 4 * Runtime.getRuntime().availableProcessors();
-        ExecutorService generateExecutor = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
-            final Thread t = new Thread( r );
-            t.setName( "Generate-" + t.getName() );
-            t.setDaemon( true );
-            return t;
-        } );
-        generateExecutor.execute( () -> doGenerate( content ) );
+        String buildConfigId = content.getBuildConfigId();
+        Object lock = buildConfigLocks.computeIfAbsent( buildConfigId, k -> new Object() );
+        synchronized ( lock )
+        {
+            while ( isInProgress( buildConfigId ) )
+            {
+                logger.info( "There is already a generation process in progress for buildConfigId {}, waiting on lock.",
+                             buildConfigId );
+                try
+                {
+                    lock.wait();
+                }
+                catch ( InterruptedException e )
+                {
+                    Thread.currentThread().interrupt();
+                    return;
+                }
+            }
+            recordInProgress( content.getBuildConfigId() );
+            generateExecutor.execute( () -> {
+                try
+                {
+                    doGenerate( content );
+                }
+                finally
+                {
+                    synchronized ( lock )
+                    {
+                        recordCompleted( content.getBuildConfigId() );
+                        buildConfigLocks.remove( buildConfigId );
+                        lock.notifyAll();
+                        logger.info( "lock released, buildConfigId {}", buildConfigId );
+                    }
+                }
+            } );
+        }
     }
 
     protected Boolean doGenerate( HistoricalContentDTO content )
     {
         logger.info( "Handle generate event: {}, build config id: {}", EVENT_GENERATE_ARCHIVE,
                      content.getBuildConfigId() );
-        recordInProgress( content.getBuildConfigId() );
 
         Map<String, HistoricalEntryDTO> entryDTOs = reader.readEntries( content );
         Map<String, String> downloadPaths = new HashMap<>();
@@ -195,7 +234,6 @@ protected Boolean doGenerate( HistoricalContentDTO content )
             created = renderArchive( archive.get(), content.getBuildConfigId() );
         }
 
-        recordCompleted( content.getBuildConfigId() );
         return created;
     }
 
@@ -239,11 +277,17 @@ public boolean statusExists( final String buildConfigId )
         return treated.containsKey( buildConfigId );
     }
 
-    public String getStatus( String buildConfigId )
+    public String getStatus( final String buildConfigId )
     {
         return treated.get( buildConfigId );
     }
 
+    public boolean isInProgress( final String buildConfigId )
+    {
+        return statusExists( buildConfigId ) && getStatus( buildConfigId ).equals(
+                ArchiveStatus.inProgress.getArchiveStatus() );
+    }
+
     private void downloadArtifacts( final Map<String, HistoricalEntryDTO> entryDTOs,
                                     final Map<String, String> downloadPaths, final HistoricalContentDTO content )
             throws InterruptedException, ExecutionException, IOException
@@ -533,12 +577,7 @@ private void restoreGenerateStatusFromDisk() throws IOException
         List<File> contents = walkAllFiles( archiveDir );
         for ( File content : contents )
         {
-            if ( content.getName().endsWith( PART_ARCHIVE_SUFFIX ) )
-            {
-                treated.put( content.getName().split( PART_ARCHIVE_SUFFIX )[0],
-                             ArchiveStatus.inProgress.getArchiveStatus() );
-            }
-            else if ( content.getName().endsWith( ARCHIVE_SUFFIX ) )
+            if ( content.getName().endsWith( ARCHIVE_SUFFIX ) )
             {
                 treated.put( content.getName().split( ARCHIVE_SUFFIX )[0], ArchiveStatus.completed.getArchiveStatus() );
             }
diff --git a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
index 6b0a383..b01d36d 100644
--- a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
+++ b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
@@ -18,6 +18,19 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import io.smallrye.mutiny.Uni;
 import io.vertx.core.eventbus.EventBus;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.Consumes;
+import jakarta.ws.rs.DELETE;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.Response;
+import jakarta.ws.rs.core.Response.ResponseBuilder;
+import jakarta.ws.rs.core.UriInfo;
 import org.apache.commons.io.FileUtils;
 import org.commonjava.indy.service.archive.controller.ArchiveController;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
@@ -31,19 +44,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import jakarta.ws.rs.PathParam;
-import jakarta.inject.Inject;
-import jakarta.ws.rs.Consumes;
-import jakarta.ws.rs.DELETE;
-import jakarta.ws.rs.GET;
-import jakarta.ws.rs.POST;
-import jakarta.ws.rs.Path;
-import jakarta.ws.rs.Produces;
-import jakarta.ws.rs.core.Context;
-import jakarta.ws.rs.core.MediaType;
-import jakarta.ws.rs.core.Response;
-import jakarta.ws.rs.core.Response.ResponseBuilder;
-import jakarta.ws.rs.core.UriInfo;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -72,14 +72,15 @@ public class ArchiveManageResources
     @Inject
     EventBus bus;
 
-
     @Operation( description = "Generate archive based on tracked content" )
     @APIResponse( responseCode = "202", description = "The archive created request is accepted" )
-    @RequestBody( description = "The tracked content definition JSON", name = "body", required = true, content = @Content( mediaType = APPLICATION_JSON, example =
-                    "{" + "\"buildConfigId\": \"XXX\"," + "\"downloads\":" + "[{" + "    \"storeKey\": \"\","
-                                    + "    \"path\": \"\"," + "    \"md5\": \"\"," + "    \"sha256\": \"\","
-                                    + "    \"sha1\": \"\"," + "    \"size\": 001" + "  }," + "..."
-                                    + "]}", schema = @Schema( implementation = HistoricalContentDTO.class ) ) )
+    @RequestBody( description = "The tracked content definition JSON", name = "body", required = true,
+                  content = @Content( mediaType = APPLICATION_JSON,
+                                      example = "{" + "\"buildConfigId\": \"XXX\"," + "\"downloads\":" + "[{"
+                                              + "    \"storeKey\": \"\"," + "    \"path\": \"\"," + "    \"md5\": \"\","
+                                              + "    \"sha256\": \"\"," + "    \"sha1\": \"\"," + "    \"size\": 001"
+                                              + "  }," + "..." + "]}",
+                                      schema = @Schema( implementation = HistoricalContentDTO.class ) ) )
     @POST
     @Path( "generate" )
     @Consumes( APPLICATION_JSON )

From 6c61b709b6b9cc635d74cbd0aa5545fc31653823 Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Tue, 3 Dec 2024 00:48:38 +0800
Subject: [PATCH 3/8] Add API to delete archive with checksum validation

---
 .../archive/controller/ArchiveController.java | 95 ++++++++++++++++---
 .../archive/jaxrs/ArchiveManageResources.java | 21 ++++
 2 files changed, 104 insertions(+), 12 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index 2560f2e..85ffc69 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -29,6 +29,7 @@
 import org.apache.http.impl.client.BasicCookieStore;
 import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
 import org.commonjava.indy.service.archive.config.PreSeedConfig;
 import org.commonjava.indy.service.archive.model.ArchiveStatus;
 import org.commonjava.indy.service.archive.model.dto.HistoricalContentDTO;
@@ -43,11 +44,12 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.security.KeyManagementException;
-import java.security.KeyStoreException;
+import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -88,6 +90,16 @@ public class ArchiveController
 
     private final String PART_ARCHIVE_SUFFIX = PART_SUFFIX + ARCHIVE_SUFFIX;
 
+    private static final int threads = 4 * Runtime.getRuntime().availableProcessors();
+
+    private final ExecutorService generateExecutor =
+            Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
+                final Thread t = new Thread( r );
+                t.setName( "Generate-" + t.getName() );
+                t.setDaemon( true );
+                return t;
+            } );
+
     private static final Set<String> CHECKSUMS = Collections.unmodifiableSet( new HashSet<String>()
     {
         {
@@ -99,15 +111,13 @@ public class ArchiveController
 
     private static final Map<String, Object> buildConfigLocks = new ConcurrentHashMap<>();
 
-    private static final int threads = 4 * Runtime.getRuntime().availableProcessors();
+    private static final String SHA_256 = "SHA-256";
 
-    private static final ExecutorService generateExecutor =
-            Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
-                final Thread t = new Thread( r );
-                t.setName( "Generate-" + t.getName() );
-                t.setDaemon( true );
-                return t;
-            } );
+    private static final Long BOLCK_SIZE = 100 * 1024 * 1024L;
+
+    private static final String HEX_DIGITS = "0123456789abcdef";
+
+    private static final char[] HEX_ARRAY = HEX_DIGITS.toCharArray();
 
     @Inject
     HistoricalContentListReader reader;
@@ -130,7 +140,7 @@ public class ArchiveController
 
     @PostConstruct
     public void init()
-            throws IOException, NoSuchAlgorithmException, KeyStoreException, KeyManagementException
+            throws IOException
     {
         int threads = 4 * Runtime.getRuntime().availableProcessors();
         executorService = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
@@ -140,8 +150,11 @@ public void init()
             return t;
         } );
 
+        final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager();
+        ccm.setMaxTotal( 500 );
+
         RequestConfig rc = RequestConfig.custom().build();
-        client = HttpClients.custom().setDefaultRequestConfig( rc ).build();
+        client = HttpClients.custom().setConnectionManager( ccm ).setDefaultRequestConfig( rc ).build();
 
         String storeDir = preSeedConfig.storageDir().orElse( "data" );
         contentDir = String.format( "%s%s", storeDir, CONTENT_DIR );
@@ -259,6 +272,64 @@ public void deleteArchive( final String buildConfigId )
         logger.info( "Historical archive for build config id: {} is deleted.", buildConfigId );
     }
 
+    public void deleteArchiveWithChecksum( final String buildConfigId, final String checksum )
+            throws IOException
+    {
+        logger.info( "Start to delete archive with checksum validation, buildConfigId {}, checksum {}", buildConfigId,
+                     checksum );
+        File zip = new File( archiveDir, buildConfigId + ARCHIVE_SUFFIX );
+        if ( !zip.exists() )
+        {
+            return;
+        }
+
+        try (FileChannel channel = new FileInputStream( zip ).getChannel())
+        {
+            MessageDigest digest = MessageDigest.getInstance( SHA_256 );
+            long position = 0;
+            long size = channel.size();
+
+            while ( position < size )
+            {
+                long remaining = size - position;
+                long currentBlock = Math.min( remaining, BOLCK_SIZE );
+                MappedByteBuffer buffer = channel.map( FileChannel.MapMode.READ_ONLY, position, currentBlock );
+                digest.update( buffer );
+                position += currentBlock;
+            }
+
+            String stored = bytesToHex( digest.digest() );
+            // only delete the zip once checksum is equaled
+            if ( stored.equals( checksum ) )
+            {
+                zip.delete();
+                logger.info( "Historical archive for build config id: {} is deleted, checksum {}.", buildConfigId,
+                             stored );
+            }
+            else
+            {
+                logger.info( "Don't delete the {} zip, transferred checksum {}, but stored checksum {}.", buildConfigId,
+                             checksum, stored );
+            }
+        }
+        catch ( NoSuchAlgorithmException e )
+        {
+            logger.error( "No such algorithm SHA-256 Exception", e );
+        }
+    }
+
+    private String bytesToHex( byte[] hash )
+    {
+        char[] hexChars = new char[hash.length * 2];
+        for ( int i = 0; i < hash.length; i++ )
+        {
+            int v = hash[i] & 0xFF;
+            hexChars[i * 2] = HEX_ARRAY[v >>> 4];
+            hexChars[i * 2 + 1] = HEX_ARRAY[v & 0x0F];
+        }
+        return new String( hexChars );
+    }
+
     public void cleanup()
             throws IOException
     {
diff --git a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
index b01d36d..96a8c60 100644
--- a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
+++ b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
@@ -188,6 +188,27 @@ public Uni<Response> delete( final @PathParam( "buildConfigId" ) String buildCon
         return Uni.createFrom().item( noContent().build() );
     }
 
+    @Operation( description = "Delete the archive by buildConfigId and checksum" )
+    @APIResponse( responseCode = "204", description = "The history archive is deleted or doesn't exist" )
+    @Path( "{buildConfigId}/{checksum}" )
+    @DELETE
+    public Uni<Response> deleteWithChecksum( final @PathParam( "buildConfigId" ) String buildConfigId,
+                                             final @PathParam( "checksum" ) String checksum,
+                                             final @Context UriInfo uriInfo )
+    {
+        try
+        {
+            controller.deleteArchiveWithChecksum( buildConfigId, checksum );
+        }
+        catch ( final IOException e )
+        {
+            final String message = "Failed to delete historical archive for build config id: " + buildConfigId;
+            logger.error( message, e );
+            return fromResponse( message );
+        }
+        return Uni.createFrom().item( noContent().build() );
+    }
+
     @Operation( description = "Clean up all the temp workplace" )
     @APIResponse( responseCode = "204", description = "The workplace cleanup is finished" )
     @Path( "cleanup" )

From 452b14d8d3f0e35b4a744d6dc357e45d7ee5928b Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Tue, 3 Dec 2024 11:53:16 +0800
Subject: [PATCH 4/8] Delete target to prevent an obsolete file from being left
 behind after an HTTP error

---
 .../archive/controller/ArchiveController.java  | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index 85ffc69..54ed5c1 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -103,9 +103,9 @@ public class ArchiveController
     private static final Set<String> CHECKSUMS = Collections.unmodifiableSet( new HashSet<String>()
     {
         {
-            add( ".md5" );
             add( ".sha1" );
             add( ".sha256" );
+            add( ".md5" );
         }
     } );
 
@@ -299,7 +299,7 @@ public void deleteArchiveWithChecksum( final String buildConfigId, final String
             }
 
             String stored = bytesToHex( digest.digest() );
-            // only delete the zip once checksum is equaled
+            // only delete the zip once checksum is matched
             if ( stored.equals( checksum ) )
             {
                 zip.delete();
@@ -374,6 +374,7 @@ private void downloadArtifacts( final Map<String, HistoricalEntryDTO> entryDTOs,
         Map<String, List<String>> originalChecksumsMap = new HashMap<>();
         if ( originalTracked != null )
         {
+            logger.trace( "originalChecksumsMap generated for {}", content.getBuildConfigId() );
             Map<String, HistoricalEntryDTO> originalEntries = reader.readEntries( originalTracked );
             originalEntries.forEach( ( key, entry ) -> originalChecksumsMap.put( key, new ArrayList<>(
                     Arrays.asList( entry.getSha1(), entry.getSha256(), entry.getMd5() ) ) ) );
@@ -530,6 +531,7 @@ private HistoricalContentDTO unpackHistoricalArchive( String contentBuildDir, St
         ZipEntry entry;
         while ( ( entry = inputStream.getNextEntry() ) != null )
         {
+            logger.trace( "entry path:" + entry.getName() );
             File outputFile = new File( contentBuildDir, entry.getName() );
             outputFile.getParentFile().mkdirs();
             try ( FileOutputStream outputStream = new FileOutputStream( outputFile ) )
@@ -554,7 +556,7 @@ private HistoricalContentDTO unpackHistoricalArchive( String contentBuildDir, St
 
     private boolean validateChecksum( final String filePath, final List<String> current, final List<String> original )
     {
-        if ( CHECKSUMS.stream().anyMatch( suffix -> filePath.toLowerCase().endsWith( "." + suffix ) ) )
+        if ( CHECKSUMS.stream().anyMatch( suffix -> filePath.toLowerCase().endsWith( suffix ) ) )
         {
             // skip to validate checksum files
             return false;
@@ -563,14 +565,17 @@ private boolean validateChecksum( final String filePath, final List<String> curr
         {
             return false;
         }
+        // once sha1 is matched, skip downloading
         if ( original.get( 0 ) != null && original.get( 0 ).equals( current.get( 0 ) ) )
         {
             return true;
         }
+        // once sha256 is matched, skip downloading
         if ( original.get( 1 ) != null && original.get( 1 ).equals( current.get( 1 ) ) )
         {
             return true;
         }
+        // once md5 is matched, skip downloading
         return original.get( 2 ) != null && original.get( 2 ).equals( current.get( 2 ) );
     }
 
@@ -584,7 +589,7 @@ private Callable<Boolean> download( final String contentBuildDir, final String p
             if ( target.exists() && validateChecksum( filePath, checksums, originalChecksums ) )
             {
                 logger.debug(
-                        "<<<Already existed in historical archive, and checksum equals, skip downloading, path: {}.",
+                        "<<<Already existed in historical archive, and checksum matches, skip downloading, path: {}.",
                         path );
                 return true;
             }
@@ -596,6 +601,11 @@ private Callable<Boolean> download( final String contentBuildDir, final String p
             context.setCookieStore( cookieStore );
             final HttpGet request = new HttpGet( path );
             InputStream input = null;
+            if ( target.exists() )
+            {
+                // delete first so an obsolete file from a failed HTTP download is not left behind
+                target.delete();
+            }
             try
             {
                 CloseableHttpResponse response = client.execute( request, context );

From 47f98686ea98191aecd0dcce15de9e7b1a719e5f Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Tue, 3 Dec 2024 12:09:05 +0800
Subject: [PATCH 5/8] Use DigestUtils to compute checksum

---
 .../archive/controller/ArchiveController.java | 53 +++----------------
 1 file changed, 7 insertions(+), 46 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index 54ed5c1..0e7f533 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -20,6 +20,7 @@
 import jakarta.annotation.PreDestroy;
 import jakarta.enterprise.context.ApplicationScoped;
 import jakarta.inject.Inject;
+import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.http.client.CookieStore;
 import org.apache.http.client.config.RequestConfig;
@@ -44,13 +45,9 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.nio.MappedByteBuffer;
-import java.nio.channels.FileChannel;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -111,14 +108,6 @@ public class ArchiveController
 
     private static final Map<String, Object> buildConfigLocks = new ConcurrentHashMap<>();
 
-    private static final String SHA_256 = "SHA-256";
-
-    private static final Long BOLCK_SIZE = 100 * 1024 * 1024L;
-
-    private static final String HEX_DIGITS = "0123456789abcdef";
-
-    private static final char[] HEX_ARRAY = HEX_DIGITS.toCharArray();
-
     @Inject
     HistoricalContentListReader reader;
 
@@ -283,51 +272,23 @@ public void deleteArchiveWithChecksum( final String buildConfigId, final String
             return;
         }
 
-        try (FileChannel channel = new FileInputStream( zip ).getChannel())
+        try (FileInputStream fis = new FileInputStream( zip ))
         {
-            MessageDigest digest = MessageDigest.getInstance( SHA_256 );
-            long position = 0;
-            long size = channel.size();
-
-            while ( position < size )
-            {
-                long remaining = size - position;
-                long currentBlock = Math.min( remaining, BOLCK_SIZE );
-                MappedByteBuffer buffer = channel.map( FileChannel.MapMode.READ_ONLY, position, currentBlock );
-                digest.update( buffer );
-                position += currentBlock;
-            }
+            String storedChecksum = DigestUtils.sha256Hex( fis );
 
-            String stored = bytesToHex( digest.digest() );
             // only delete the zip once checksum is matched
-            if ( stored.equals( checksum ) )
+            if ( storedChecksum.equals( checksum ) )
             {
                 zip.delete();
                 logger.info( "Historical archive for build config id: {} is deleted, checksum {}.", buildConfigId,
-                             stored );
+                             storedChecksum );
             }
             else
             {
-                logger.info( "Don't delete the {} zip, transferred checksum {}, but stored checksum {}.", buildConfigId,
-                             checksum, stored );
+                logger.info( "Don't delete the {} zip, transferred checksum {}, but stored checksum {}.",
+                             buildConfigId, checksum, storedChecksum );
             }
         }
-        catch ( NoSuchAlgorithmException e )
-        {
-            logger.error( "No such algorithm SHA-256 Exception", e );
-        }
-    }
-
-    private String bytesToHex( byte[] hash )
-    {
-        char[] hexChars = new char[hash.length * 2];
-        for ( int i = 0; i < hash.length; i++ )
-        {
-            int v = hash[i] & 0xFF;
-            hexChars[i * 2] = HEX_ARRAY[v >>> 4];
-            hexChars[i * 2 + 1] = HEX_ARRAY[v & 0x0F];
-        }
-        return new String( hexChars );
     }
 
     public void cleanup()

From dac7f702a450ac3c888298ffd9c44b8be945ceff Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Tue, 3 Dec 2024 12:35:22 +0800
Subject: [PATCH 6/8] Add configurable thread-multiplier for executors

---
 .../service/archive/config/PreSeedConfig.java |  3 ++
 .../archive/controller/ArchiveController.java | 30 ++++++++-----------
 src/main/resources/application.yaml           |  3 +-
 .../archive/util/TestPreSeedConfig.java       |  6 ++++
 4 files changed, 24 insertions(+), 18 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java b/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java
index f7baaaa..d56b8a1 100644
--- a/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java
+++ b/src/main/java/org/commonjava/indy/service/archive/config/PreSeedConfig.java
@@ -31,4 +31,7 @@ public interface PreSeedConfig
 
     @WithName( "not-used-days-cleanup" )
     Optional<Long> notUsedDaysCleanup();
+
+    @WithName( "thread-multiplier" )
+    Optional<Integer> threadMultiplier();
 }
diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index 0e7f533..b6fab58 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -72,6 +72,7 @@
 @ApplicationScoped
 public class ArchiveController
 {
+    private final Logger logger = LoggerFactory.getLogger( getClass() );
 
     public final static String EVENT_GENERATE_ARCHIVE = "generate-archive";
 
@@ -79,24 +80,12 @@ public class ArchiveController
 
     public final static String ARCHIVE_DIR = "/archive";
 
-    private final Logger logger = LoggerFactory.getLogger( getClass() );
-
     private final String ARCHIVE_SUFFIX = ".zip";
 
     private final String PART_SUFFIX = ".part";
 
     private final String PART_ARCHIVE_SUFFIX = PART_SUFFIX + ARCHIVE_SUFFIX;
 
-    private static final int threads = 4 * Runtime.getRuntime().availableProcessors();
-
-    private final ExecutorService generateExecutor =
-            Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
-                final Thread t = new Thread( r );
-                t.setName( "Generate-" + t.getName() );
-                t.setDaemon( true );
-                return t;
-            } );
-
     private static final Set<String> CHECKSUMS = Collections.unmodifiableSet( new HashSet<String>()
     {
         {
@@ -117,7 +106,9 @@ public class ArchiveController
     @Inject
     ObjectMapper objectMapper;
 
-    private ExecutorService executorService;
+    private ExecutorService downloadExecutor;
+
+    private ExecutorService generateExecutor;
 
     private CloseableHttpClient client;
 
@@ -131,17 +122,22 @@ public class ArchiveController
     public void init()
             throws IOException
     {
-        int threads = 4 * Runtime.getRuntime().availableProcessors();
-        executorService = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
+        int threads = preSeedConfig.threadMultiplier().orElse( 4 ) * Runtime.getRuntime().availableProcessors();
+        downloadExecutor = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
             final Thread t = new Thread( r );
             t.setName( "Download-" + t.getName() );
             t.setDaemon( true );
             return t;
         } );
+        generateExecutor = Executors.newFixedThreadPool( threads, ( final Runnable r ) -> {
+            final Thread t = new Thread( r );
+            t.setName( "Generate-" + t.getName() );
+            t.setDaemon( true );
+            return t;
+        } );
 
         final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager();
         ccm.setMaxTotal( 500 );
-
         RequestConfig rc = RequestConfig.custom().build();
         client = HttpClients.custom().setConnectionManager( ccm ).setDefaultRequestConfig( rc ).build();
 
@@ -325,7 +321,7 @@ private void downloadArtifacts( final Map<String, HistoricalEntryDTO> entryDTOs,
             throws InterruptedException, ExecutionException, IOException
     {
         BasicCookieStore cookieStore = new BasicCookieStore();
-        ExecutorCompletionService<Boolean> executor = new ExecutorCompletionService<>( executorService );
+        ExecutorCompletionService<Boolean> executor = new ExecutorCompletionService<>( downloadExecutor );
 
         String contentBuildDir = String.format( "%s/%s", contentDir, content.getBuildConfigId() );
         File dir = new File( contentBuildDir );
diff --git a/src/main/resources/application.yaml b/src/main/resources/application.yaml
index b82efe5..ac6f007 100644
--- a/src/main/resources/application.yaml
+++ b/src/main/resources/application.yaml
@@ -46,4 +46,5 @@ quarkus:
 pre-seed:
   main-indy: https://indy-gateway-master-devel.psi.redhat.com
   storage-dir: data
-  not-used-days-cleanup: 10
\ No newline at end of file
+  not-used-days-cleanup: 10
+  thread-multiplier: 4
\ No newline at end of file
diff --git a/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java b/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java
index a9d8a5b..8bab7d4 100644
--- a/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java
+++ b/src/test/java/org/commonjava/indy/service/archive/util/TestPreSeedConfig.java
@@ -46,4 +46,10 @@ public Optional<Long> notUsedDaysCleanup()
         return Optional.of( 365l );
     }
 
+    @Override
+    public Optional<Integer> threadMultiplier()
+    {
+        return Optional.of( 4 );
+    }
+
 }

From c6a9cd93eb571b31e5aee6520ff4db87a08cb77f Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Wed, 4 Dec 2024 19:11:52 +0800
Subject: [PATCH 7/8] Change response for synchronized generation and add
 timeout error handling

---
 .../archive/controller/ArchiveController.java | 67 +++++++++++++------
 .../archive/jaxrs/ArchiveManageResources.java | 16 +++--
 2 files changed, 57 insertions(+), 26 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index b6fab58..b8a75f4 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -64,6 +64,9 @@
 import java.util.concurrent.ExecutorCompletionService;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
@@ -153,44 +156,59 @@ public void destroy()
         IOUtils.closeQuietly( client, null );
     }
 
-    public void generate( HistoricalContentDTO content )
+    public boolean generate( HistoricalContentDTO content )
     {
         String buildConfigId = content.getBuildConfigId();
         Object lock = buildConfigLocks.computeIfAbsent( buildConfigId, k -> new Object() );
         synchronized ( lock )
         {
-            while ( isInProgress( buildConfigId ) )
+            if ( isInProgress( buildConfigId ) )
             {
                 logger.info( "There is already generation process in progress for buildConfigId {}, try lock wait.",
                              buildConfigId );
-                try
-                {
-                    lock.wait();
-                }
-                catch ( InterruptedException e )
-                {
-                    Thread.currentThread().interrupt();
-                    return;
-                }
+                // Conflicted generation, just return with expected http response immediately
+                return false;
             }
-            recordInProgress( content.getBuildConfigId() );
-            generateExecutor.execute( () -> {
+
+            recordInProgress( buildConfigId );
+            Future<?> future = generateExecutor.submit( () -> {
                 try
                 {
                     doGenerate( content );
                 }
+                catch ( Exception e )
+                {
+                    // If generation fails, remove the status so a subsequent generation attempt can proceed
+                    removeStatus( buildConfigId );
+                    logger.error( "Generation failed for buildConfigId {}", buildConfigId, e );
+                }
                 finally
                 {
-                    synchronized ( lock )
-                    {
-                        recordCompleted( content.getBuildConfigId() );
-                        buildConfigLocks.remove( buildConfigId );
-                        lock.notifyAll();
-                        logger.info( "lock released, buildConfigId {}", buildConfigId );
-                    }
+                    recordCompleted( buildConfigId );
+                    buildConfigLocks.remove( buildConfigId );
+                    logger.info( "lock released, buildConfigId {}", buildConfigId );
                 }
             } );
+            try
+            {
+                future.get( 60, TimeUnit.MINUTES );
+            }
+            catch ( TimeoutException e )
+            {
+                // If generation times out, cancel it and remove the status so a subsequent generation attempt can proceed
+                future.cancel( true );
+                removeStatus( buildConfigId );
+                logger.error( "Generation timeout for buildConfigId {}", buildConfigId, e );
+            }
+            catch ( InterruptedException | ExecutionException e )
+            {
+                // If the future task fails or is interrupted, cancel it and remove the status so a subsequent generation attempt can proceed
+                future.cancel( true );
+                removeStatus( buildConfigId );
+                logger.error( "Generation future task level failed for buildConfigId {}", buildConfigId, e );
+            }
         }
+        return true;
     }
 
     protected Boolean doGenerate( HistoricalContentDTO content )
@@ -622,15 +640,20 @@ private void restoreGenerateStatusFromDisk() throws IOException
         }
     }
 
-    private void recordInProgress( String buildConfigId )
+    private void recordInProgress( final String buildConfigId )
     {
         treated.remove( buildConfigId );
         treated.put( buildConfigId, ArchiveStatus.inProgress.getArchiveStatus() );
     }
 
-    private void recordCompleted( String buildConfigId )
+    private void recordCompleted( final String buildConfigId )
     {
         treated.remove( buildConfigId );
         treated.put( buildConfigId, ArchiveStatus.completed.getArchiveStatus() );
     }
+
+    private void removeStatus( final String buildConfigId )
+    {
+        treated.remove( buildConfigId );
+    }
 }
diff --git a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
index 96a8c60..be4dbe8 100644
--- a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
+++ b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
@@ -51,6 +51,7 @@
 
 import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON;
 import static jakarta.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM;
+import static jakarta.ws.rs.core.Response.Status.CONFLICT;
 import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
 import static jakarta.ws.rs.core.Response.accepted;
 import static jakarta.ws.rs.core.Response.noContent;
@@ -74,6 +75,7 @@ public class ArchiveManageResources
 
     @Operation( description = "Generate archive based on tracked content" )
     @APIResponse( responseCode = "202", description = "The archive created request is accepted" )
+    @APIResponse( responseCode = "409", description = "The archive created request is conflicted" )
     @RequestBody( description = "The tracked content definition JSON", name = "body", required = true,
                   content = @Content( mediaType = APPLICATION_JSON,
                                       example = "{" + "\"buildConfigId\": \"XXX\"," + "\"downloads\":" + "[{"
@@ -104,11 +106,17 @@ public Uni<Response> create( final @Context UriInfo uriInfo, final InputStream b
             return fromResponse( message );
         }
 
-        controller.generate( content );
+        boolean accepted = controller.generate( content );
         return Uni.createFrom()
-                  .item( accepted().type( MediaType.TEXT_PLAIN )
-                                   .entity( "Archive created request is accepted." )
-                                   .build() );
+                  .item( accepted ?
+                                 accepted().type( MediaType.TEXT_PLAIN )
+                                           .entity( "Archive created request is accepted." )
+                                           .build() :
+                                 Response.status( CONFLICT )
+                                         .type( MediaType.TEXT_PLAIN )
+                                         .entity(
+                                                 "Another generation with same build config ID was already in progress, request is conflicted." )
+                                         .build() );
     }
 
     @Operation( description = "Get the status of generating archive based on build config Id" )

From 5ce8feba6d7ff0fb0c1e3bc249410f36f0d5e843 Mon Sep 17 00:00:00 2001
From: yma <yma@redhat.com>
Date: Fri, 6 Dec 2024 09:27:57 +0800
Subject: [PATCH 8/8] Fix response to accepted for concurrent generation

---
 .../archive/controller/ArchiveController.java    |  7 +++----
 .../archive/jaxrs/ArchiveManageResources.java    | 16 ++++------------
 2 files changed, 7 insertions(+), 16 deletions(-)

diff --git a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
index b8a75f4..1b76c48 100644
--- a/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
+++ b/src/main/java/org/commonjava/indy/service/archive/controller/ArchiveController.java
@@ -156,7 +156,7 @@ public void destroy()
         IOUtils.closeQuietly( client, null );
     }
 
-    public boolean generate( HistoricalContentDTO content )
+    public void generate( HistoricalContentDTO content )
     {
         String buildConfigId = content.getBuildConfigId();
         Object lock = buildConfigLocks.computeIfAbsent( buildConfigId, k -> new Object() );
@@ -166,8 +166,8 @@ public boolean generate( HistoricalContentDTO content )
             {
                 logger.info( "There is already generation process in progress for buildConfigId {}, try lock wait.",
                              buildConfigId );
-                // Conflicted generation, just return with expected http response immediately
-                return false;
+                // Conflicted generation, just return immediately
+                return;
             }
 
             recordInProgress( buildConfigId );
@@ -208,7 +208,6 @@ public boolean generate( HistoricalContentDTO content )
                 logger.error( "Generation future task level failed for buildConfigId {}", buildConfigId, e );
             }
         }
-        return true;
     }
 
     protected Boolean doGenerate( HistoricalContentDTO content )
diff --git a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
index be4dbe8..96a8c60 100644
--- a/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
+++ b/src/main/java/org/commonjava/indy/service/archive/jaxrs/ArchiveManageResources.java
@@ -51,7 +51,6 @@
 
 import static jakarta.ws.rs.core.MediaType.APPLICATION_JSON;
 import static jakarta.ws.rs.core.MediaType.APPLICATION_OCTET_STREAM;
-import static jakarta.ws.rs.core.Response.Status.CONFLICT;
 import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
 import static jakarta.ws.rs.core.Response.accepted;
 import static jakarta.ws.rs.core.Response.noContent;
@@ -75,7 +74,6 @@ public class ArchiveManageResources
 
     @Operation( description = "Generate archive based on tracked content" )
     @APIResponse( responseCode = "202", description = "The archive created request is accepted" )
-    @APIResponse( responseCode = "409", description = "The archive created request is conflicted" )
     @RequestBody( description = "The tracked content definition JSON", name = "body", required = true,
                   content = @Content( mediaType = APPLICATION_JSON,
                                       example = "{" + "\"buildConfigId\": \"XXX\"," + "\"downloads\":" + "[{"
@@ -106,17 +104,11 @@ public Uni<Response> create( final @Context UriInfo uriInfo, final InputStream b
             return fromResponse( message );
         }
 
-        boolean accepted = controller.generate( content );
+        controller.generate( content );
         return Uni.createFrom()
-                  .item( accepted ?
-                                 accepted().type( MediaType.TEXT_PLAIN )
-                                           .entity( "Archive created request is accepted." )
-                                           .build() :
-                                 Response.status( CONFLICT )
-                                         .type( MediaType.TEXT_PLAIN )
-                                         .entity(
-                                                 "Another generation with same build config ID was already in progress, request is conflicted." )
-                                         .build() );
+                  .item( accepted().type( MediaType.TEXT_PLAIN )
+                                   .entity( "Archive created request is accepted." )
+                                   .build() );
     }
 
     @Operation( description = "Get the status of generating archive based on build config Id" )