diff --git a/.github/dependabot.yml b/.github/dependabot.yml index b3c016d2..fdc58d1e 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,8 +1,12 @@ +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + version: 2 updates: - package-ecosystem: "maven" directory: "/" - reviewers: - - "dwnusbaum" schedule: - interval: "daily" + interval: "weekly" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 00000000..0d0b1c99 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1 @@ +_extends: .github diff --git a/.github/workflows/cd.yaml b/.github/workflows/cd.yaml new file mode 100644 index 00000000..0279984d --- /dev/null +++ b/.github/workflows/cd.yaml @@ -0,0 +1,15 @@ +# Note: additional setup is required, see https://www.jenkins.io/redirect/continuous-delivery-of-plugins + +name: cd +on: + workflow_dispatch: + check_run: + types: + - completed + +jobs: + maven-cd: + uses: jenkins-infra/github-reusable-workflows/.github/workflows/maven-cd.yml@v1 + secrets: + MAVEN_USERNAME: ${{ secrets.MAVEN_USERNAME }} + MAVEN_TOKEN: ${{ secrets.MAVEN_TOKEN }} diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml index a2d496cc..90787cbb 100644 --- a/.mvn/extensions.xml +++ b/.mvn/extensions.xml @@ -2,6 +2,6 @@ io.jenkins.tools.incrementals git-changelist-maven-extension - 1.0-beta-4 + 1.6 diff --git a/.mvn/maven.config b/.mvn/maven.config index 2a0299c4..f7daf60d 100644 --- a/.mvn/maven.config +++ b/.mvn/maven.config @@ -1,2 +1,3 @@ -Pconsume-incrementals -Pmight-produce-incrementals +-Dchangelist.format=%d.v%s diff --git a/Jenkinsfile b/Jenkinsfile index a229fa51..b94810a2 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1 +1,5 @@ -buildPlugin() +buildPlugin(useContainerAgent: true, configurations: [ + [ platform: "linux", jdk: "8" ], + [ platform: "windows", jdk: "8" ], + [ platform: "linux", jdk: "11" ] +]) diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..e1d10771 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013-2021 CloudBees, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md index 23f7d5d0..51651327 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Pipeline API Plugin [![Jenkins Plugin](https://img.shields.io/jenkins/plugin/v/workflow-api)](https://plugins.jenkins.io/workflow-api) -[![Changelog](https://img.shields.io/github/v/tag/jenkinsci/workflow-api-plugin?label=changelog)](https://github.com/jenkinsci/workflow-api-plugin/blob/master/CHANGELOG.md) +[![GitHub Release](https://img.shields.io/github/v/tag/jenkinsci/workflow-api-plugin?label=changelog)](https://github.com/jenkinsci/workflow-api-plugin/releases/latest) [![Jenkins Plugin Installs](https://img.shields.io/jenkins/plugin/i/workflow-api?color=blue)](https://plugins.jenkins.io/workflow-api) # Introduction @@ -10,3 +10,8 @@ Plugin that defines Pipeline API. A component of [Pipeline Plugin](https://plugins.jenkins.io/workflow-aggregator). + +# Changelog + +* For new versions, see [GitHub Releases](https://github.com/jenkinsci/workflow-api-plugin/releases) +* For versions 2.41 and older, see the [CHANGELOG](https://github.com/jenkinsci/workflow-api-plugin/blob/master/CHANGELOG.md) diff --git a/docs/flowgraph.md b/docs/flowgraph.md index 2bb4653f..3c2e6550 100644 --- a/docs/flowgraph.md +++ b/docs/flowgraph.md @@ -2,7 +2,7 @@ The "Flow Graph" is how Pipeline stores structural information about Pipelines that have run or *are* running -- this is used for visualization and runtime activities. You won't find any class with this name but we refer to it like this because it's the best terminology we have. "Flow" because originally the plugin was Workflow, and describing it as a flow reminds us that execution is a directional process with a start and end, "graph" because the data is structured as a directed acyclic graph to allow for parallel execution. -This stucture describes all possible Pipelines, whether a simple set of steps, or a complex set of nested parallels, and is designed to grow at runtime as the Pipeline executes. This also means that for a running Pipeline it will be in an incomplete state, with un-terminated blocks. +This structure describes all possible Pipelines, whether a simple set of steps, or a complex set of nested parallels, and is designed to grow at runtime as the Pipeline executes. This also means that for a running Pipeline it will be in an incomplete state, with un-terminated blocks. # Concepts @@ -12,8 +12,8 @@ This may seem backwards, but it enables us to freely append to the Flow Graph as **Key information is stored in a couple ways:** -* The specific subclass of the `FlowNode` used for a particular node is structurally imporant - for example `BlockStartNode` represents the start of a block -* `StepDescriptor`: most `FlowNode`s come from running `Steps` and implement `StepNode`. This means they you can get the `StepDescriptor` to determine their `Step` that produced them. +* The specific subclass of the `FlowNode` used for a particular node is structurally important - for example `BlockStartNode` represents the start of a block +* `StepDescriptor`: most `FlowNode`s come from running `Steps` and implement `StepNode`. This means they can get the `StepDescriptor` to determine their `Step` that produced them. * Parent relationships allow us to so split into parallel branches and track them independently. 
We may have multiple nodes with a given parent (at the start of a parallel) or one node with multiple parents (the end of a parallel) * `Action`s give us all the key attributes of the `FlowNode`, see the section below for a quick reference @@ -75,7 +75,7 @@ This may seem backwards, but it enables us to freely append to the Flow Graph as * **There are no bounds on the size of the flow graph, it may have thousands of nodes in it.** Real users with complex Pipelines will really generate graphs this size. Yes, really. * **Repeat: there are no bounds on the size of the flow graph. This means if you use recursive function calls to iterate over the Flow Graph you will get a `StackOverFlowError`!!!** Use the `AbstractFlowScanner` implementations - they're free of stack overflows and well-tested. * As a back of napkin estimate, most Flow Graphs fall in the 200-700 `FlowNode` range -* `GraphListener` gotcha: because the listener is invoked for each new `FlowNode`, if you implement some operation that iterates over a lot of the Flow Graph then you've just done an O(n^2) operation and it can result in very high CPU use. This can bog down a Jenkins master if not done carefully. +* `GraphListener` gotcha: because the listener is invoked for each new `FlowNode`, if you implement some operation that iterates over a lot of the Flow Graph then you've just done an O(n^2) operation and it can result in very high CPU use. This can bog down a Jenkins controller if not done carefully. - Careful use of the methods above to iterate/find enclosing flownodes can make this much safer @@ -83,4 +83,4 @@ This may seem backwards, but it enables us to freely append to the Flow Graph as * It may seem intimidating but the Flow Graph has a TON of useful information ripe for analysis and visualization! * The gotchas above are mostly a problem for plugins that try to manipulate the Flow Graph or do automated analysis while Pipelines are running -- if you use the utilities and wait for Pipelines to complete then most of the problems go away. -* The actual data model is quite simple. \ No newline at end of file +* The actual data model is quite simple. diff --git a/pom.xml b/pom.xml index e93dcf5a..0454b41d 100644 --- a/pom.xml +++ b/pom.xml @@ -23,17 +23,17 @@ ~ THE SOFTWARE. 
--> - + 4.0.0 - org.jenkins-ci.plugins - plugin - 4.16 - + org.jenkins-ci.plugins + plugin + 4.48 + org.jenkins-ci.plugins.workflow workflow-api - ${revision}${changelist} + ${changelist} hpi Pipeline: API https://github.com/jenkinsci/${project.artifactId}-plugin @@ -44,9 +44,9 @@ - scm:git:git://github.com/jenkinsci/${project.artifactId}-plugin.git - scm:git:git@github.com:jenkinsci/${project.artifactId}-plugin.git - https://github.com/jenkinsci/${project.artifactId}-plugin + scm:git:https://github.com/${gitHubRepo}.git + scm:git:git@github.com:${gitHubRepo}.git + https://github.com/${gitHubRepo} ${scmTag} @@ -62,19 +62,18 @@ - 2.42 - -SNAPSHOT - 2.176.4 - 8 + 999999-SNAPSHOT + 2.332.1 false true + jenkinsci/${project.artifactId}-plugin io.jenkins.tools.bom - bom-2.176.x - 16 + bom-2.332.x + 1466.v85a_616ea_b_87c import pom @@ -119,7 +118,7 @@ org.jenkins-ci.test docker-fixtures - 1.10 + 166.v912b_95083ffe test diff --git a/src/main/java/org/jenkinsci/plugins/workflow/FilePathUtils.java b/src/main/java/org/jenkinsci/plugins/workflow/FilePathUtils.java index 1cac64e7..a12de116 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/FilePathUtils.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/FilePathUtils.java @@ -32,15 +32,14 @@ import hudson.remoting.Channel; import hudson.remoting.VirtualChannel; import hudson.slaves.ComputerListener; -import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.WeakHashMap; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.model.Jenkins; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; @@ -63,24 +62,24 @@ public class FilePathUtils { * Note that if an administrator disconnects an agent, configures and connects an unrelated agent with the same name, * and then this method is called on a path created against the original connection, the result may be misleading. * @param f a file, possibly remote - * @return a node name ({@code ""} for master), if known, else null + * @return a node name ({@code ""} for the controller), if known, else null */ - public static @CheckForNull String getNodeNameOrNull(@Nonnull FilePath f) { + public static @CheckForNull String getNodeNameOrNull(@NonNull FilePath f) { return Listener.getChannelName(f.getChannel()); } /** * Same as {@link #getNodeNameOrNull} but throws a diagnostic exception in case of failure. 
* @param f a file, possible remote - * @return a node name ({@code ""} for master), if known + * @return a node name ({@code ""} for the controller), if known * @throws IllegalStateException if the association to a node is unknown */ - public static @Nonnull String getNodeName(@Nonnull FilePath f) throws IllegalStateException { + public static @NonNull String getNodeName(@NonNull FilePath f) throws IllegalStateException { String name = getNodeNameOrNull(f); if (name != null) { return name; } else { - throw new IllegalStateException("no known slave for " + f + " among " + Listener.getChannelNames()); + throw new IllegalStateException("no known agent for " + f + " among " + Listener.getChannelNames()); } } @@ -90,7 +89,7 @@ public class FilePathUtils { * @param path a path as returned by {@link FilePath#getRemote} * @return a corresponding file handle, if a node with that name is online, else null */ - public static @CheckForNull FilePath find(@Nonnull String node, @Nonnull String path) { + public static @CheckForNull FilePath find(@NonNull String node, @NonNull String path) { Jenkins j = Jenkins.getInstanceOrNull(); if (j == null) { return null; @@ -113,7 +112,7 @@ private FilePathUtils() {} private static final Map channelNames = Collections.synchronizedMap(new WeakHashMap<>()); - static String getChannelName(@Nonnull VirtualChannel channel) { + static String getChannelName(@NonNull VirtualChannel channel) { String channelName = channelNames.get(channel); if (channelName == null) { @@ -143,7 +142,7 @@ static Collection getChannelNames() { addChannel(c.getChannel(), c.getName()); } } - @Override public void preOnline(Computer c, Channel channel, FilePath root, TaskListener listener) throws IOException, InterruptedException { + @Override public void preOnline(Computer c, Channel channel, FilePath root, TaskListener listener) { addChannel(channel, c.getName()); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/ArgumentsAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/ArgumentsAction.java index ad95ddc2..b926b9dc 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/ArgumentsAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/ArgumentsAction.java @@ -36,8 +36,8 @@ import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import org.apache.commons.collections.CollectionUtils; import org.jenkinsci.plugins.structs.describable.DescribableModel; import org.jenkinsci.plugins.structs.describable.UninstantiatedDescribable; @@ -131,7 +131,7 @@ public String getUrlName() { * supplied in the executed pipeline step if that value is filtered for size or security. * @return The arguments for the {@link Step} as with {@link StepDescriptor#defineArguments(Step)} */ - @Nonnull + @NonNull public Map getArguments() { Map args = getArgumentsInternal(); if (args.isEmpty()) { @@ -147,8 +147,8 @@ public Map getArguments() { * * @param n FlowNode to fetch Step arguments for (including placeholders for masked values). */ - @Nonnull - public static Map getArguments(@Nonnull FlowNode n) { + @NonNull + public static Map getArguments(@NonNull FlowNode n) { ArgumentsAction aa = n.getPersistentAction(ArgumentsAction.class); return aa != null ? 
aa.getArguments() : Collections.emptyMap(); } @@ -158,7 +158,7 @@ public static Map getArguments(@Nonnull FlowNode n) { * This means the arguments with all {@link NotStoredReason} or null values removed * @return Map of all completely stored arguments */ - @Nonnull + @NonNull public Map getFilteredArguments() { Map internalArgs = this.getArgumentsInternal(); if (internalArgs.size() == 0) { @@ -180,8 +180,8 @@ public Map getFilteredArguments() { * @param n FlowNode to get arguments for * @return Map of all completely stored arguments */ - @Nonnull - public static Map getFilteredArguments(@Nonnull FlowNode n) { + @NonNull + public static Map getFilteredArguments(@NonNull FlowNode n) { ArgumentsAction act = n.getPersistentAction(ArgumentsAction.class); return act != null ? act.getFilteredArguments() : Collections.emptyMap(); } @@ -190,7 +190,7 @@ public static Map getFilteredArguments(@Nonnull FlowNode n) { * See {@link StepDescriptor#argumentsToString(Map)} for the rules */ @CheckForNull - public static String getStepArgumentsAsString(@Nonnull FlowNode n) { + public static String getStepArgumentsAsString(@NonNull FlowNode n) { if (n instanceof StepNode) { StepDescriptor descriptor = ((StepNode) n).getDescriptor(); if (descriptor != null) { // Null if plugin providing descriptor was uninstalled @@ -205,7 +205,7 @@ public static String getStepArgumentsAsString(@Nonnull FlowNode n) { * Return a fast view of internal arguments, without creating immutable wrappers * @return Internal arguments */ - @Nonnull + @NonNull protected abstract Map getArgumentsInternal(); /** @@ -215,7 +215,7 @@ public static String getStepArgumentsAsString(@Nonnull FlowNode n) { * @return Argument value or null if not present/not stored. */ @CheckForNull - public Object getArgumentValue(@Nonnull String argumentName) { + public Object getArgumentValue(@NonNull String argumentName) { Object val = getArgumentValueOrReason(argumentName); return (val instanceof NotStoredReason) ? null : val; } @@ -226,16 +226,16 @@ public Object getArgumentValue(@Nonnull String argumentName) { * @return Argument value, null if nonexistent/null, or NotStoredReason if it existed by was masked out. 
*/ @CheckForNull - public Object getArgumentValueOrReason(@Nonnull String argumentName) { + public Object getArgumentValueOrReason(@NonNull String argumentName) { Object ob = getArgumentsInternal().get(argumentName); if (ob instanceof Map) { - return Collections.unmodifiableMap((Map)ob); + return Collections.unmodifiableMap((Map)ob); } else if (ob instanceof Set) { - return Collections.unmodifiableSet((Set)ob); + return Collections.unmodifiableSet((Set)ob); } else if (ob instanceof List) { - return Collections.unmodifiableList((List)ob); + return Collections.unmodifiableList((List)ob); } else if (ob instanceof Collection) { - return Collections.unmodifiableCollection((Collection)ob); + return Collections.unmodifiableCollection((Collection)ob); } return ob; } @@ -246,7 +246,7 @@ public Object getArgumentValueOrReason(@Nonnull String argumentName) { * @param namedArgs Set of argument name and argument value pairs, as from {@link StepDescriptor#defineArguments(Step)} * @return True if no argument has a {@link NotStoredReason} placeholder value, else false */ - static boolean checkArgumentsLackPlaceholders(@Nonnull Map namedArgs) { + static boolean checkArgumentsLackPlaceholders(@NonNull Map namedArgs) { for(Object ob : namedArgs.values()) { if (ob instanceof NotStoredReason) { return false; @@ -279,8 +279,8 @@ public boolean isUnmodifiedArguments() { * You could use {@link UninstantiatedDescribable#getModel} (where available) and {@link DescribableModel#getType} to access live classes. * Where information is missing, this will just return the best it can. */ - @Nonnull - public static Map getResolvedArguments(@Nonnull FlowNode n) { + @NonNull + public static Map getResolvedArguments(@NonNull FlowNode n) { ArgumentsAction aa = n.getPersistentAction(ArgumentsAction.class); if (aa == null) { return Collections.emptyMap(); @@ -302,8 +302,8 @@ public boolean isUnmodifiedArguments() { return args; } // helper method to capture generic type - private static UninstantiatedDescribable resolve(DescribableModel model, Map arguments) throws Exception { - return model.uninstantiate2(model.instantiate(arguments)); + private static UninstantiatedDescribable resolve(DescribableModel model, Map arguments) { + return model.uninstantiate2(model.instantiate(arguments, null)); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/ErrorAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/ErrorAction.java index df3049c8..d35f9395 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/ErrorAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/ErrorAction.java @@ -28,12 +28,17 @@ import groovy.lang.MissingMethodException; import groovy.lang.MissingPropertyException; import hudson.remoting.ProxyException; -import javax.annotation.CheckForNull; +import edu.umd.cs.findbugs.annotations.CheckForNull; import org.codehaus.groovy.control.MultipleCompilationErrorsException; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.Functions; import jenkins.model.Jenkins; import org.apache.commons.io.output.NullOutputStream; +import org.jenkinsci.plugins.workflow.flow.FlowExecution; +import org.jenkinsci.plugins.workflow.graph.AtomNode; +import org.jenkinsci.plugins.workflow.graph.BlockEndNode; +import org.jenkinsci.plugins.workflow.graphanalysis.ForkScanner; /** * Attached to {@link FlowNode} that caused an error. 
@@ -42,9 +47,9 @@ */ public class ErrorAction implements PersistentAction { - private final @Nonnull Throwable error; + private final @NonNull Throwable error; - public ErrorAction(@Nonnull Throwable error) { + public ErrorAction(@NonNull Throwable error) { if (isUnserializableException(error)) { error = new ProxyException(error); } else if (error != null) { @@ -75,6 +80,7 @@ private boolean isUnserializableException(@CheckForNull Throwable error) { // contains references to the class loader for the Pipeline Script. Storing it leads // to memory leaks. if (error instanceof MissingPropertyException && + ((MissingPropertyException)error).getType() != null && ((MissingPropertyException)error).getType().getClassLoader() instanceof GroovyClassLoader) { return true; } @@ -92,7 +98,7 @@ private boolean isUnserializableException(@CheckForNull Throwable error) { return false; } - public @Nonnull Throwable getError() { + public @NonNull Throwable getError() { return error; } @@ -110,4 +116,41 @@ public String getDisplayName() { public String getUrlName() { return null; } + + /** + * Attempts to locate the first node of a build which threw an error. + * Typically an error will be rethrown by enclosing blocks, + * so this will look for the original node with an {@link ErrorAction} + * matching the given stack trace. + * @param error an error thrown at some point during a build + * @param execution the build + * @return the originating node, if one can be located; + * typically an {@link AtomNode} or {@link BlockEndNode} + * (in the latter case you may want to use {@link BlockEndNode#getStartNode} to look up actions) + */ + public static @CheckForNull FlowNode findOrigin(@NonNull Throwable error, @NonNull FlowExecution execution) { + FlowNode candidate = null; + for (FlowNode n : new ForkScanner().allNodes(execution)) { + ErrorAction errorAction = n.getPersistentAction(ErrorAction.class); + if (errorAction != null && equals(error, errorAction.getError())) { + candidate = n; // continue search for earlier one + } + } + return candidate; + } + + /** + * {@link Throwable#equals} might not be reliable if the program has resumed + * and stuff is deserialized. + */ + private static boolean equals(Throwable t1, Throwable t2) { + if (t1 == t2) { + return true; + } else if (t1.getClass() != t2.getClass()) { + return false; + } else { + return Functions.printThrowable(t1).equals(Functions.printThrowable(t2)); + } + } + } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/QueueItemAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/QueueItemAction.java index 30d18e75..105aa920 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/QueueItemAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/QueueItemAction.java @@ -4,8 +4,8 @@ import hudson.model.Queue; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Records information for a {@code node} block. @@ -57,8 +57,8 @@ public enum QueueState { * @param node A non-null {@link FlowNode} * @return The current queue state of the flownode. 
*/ - @Nonnull - public static QueueState getNodeState(@Nonnull FlowNode node) { + @NonNull + public static QueueState getNodeState(@NonNull FlowNode node) { WorkspaceAction workspaceAction = node.getPersistentAction(WorkspaceAction.class); if (workspaceAction != null) { return QueueState.LAUNCHED; @@ -88,7 +88,7 @@ public static QueueState getNodeState(@Nonnull FlowNode node) { } @CheckForNull - public static Queue.Item getQueueItem(@Nonnull FlowNode node) { + public static Queue.Item getQueueItem(@NonNull FlowNode node) { QueueItemAction action = node.getPersistentAction(QueueItemAction.class); return action != null ? action.itemInQueue() : null; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/TagsAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/TagsAction.java index ca3054e9..b1380e73 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/TagsAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/TagsAction.java @@ -26,8 +26,8 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; @@ -41,7 +41,7 @@ public class TagsAction implements PersistentAction { private static final String displayName = "Tags"; private static final String urlSuffix = "tags"; - private LinkedHashMap tags = new LinkedHashMap<>(); + private final LinkedHashMap tags = new LinkedHashMap<>(); /** * Add a tag key:value pair to this FlowNode, null or empty values are ignored @@ -86,7 +86,7 @@ public String getTagValue(@CheckForNull String tag) { * Get the tag-value mappings * @return Unmodifiable view of tag-value mappings */ - @Nonnull + @NonNull public Map getTags() { return Collections.unmodifiableMap(tags); } @@ -97,8 +97,8 @@ public Map getTags() { * Get the set of tag-value mappings for a node * @return Unmodifiable view of tag-value mappings */ - @Nonnull - public static Map getTags(@Nonnull FlowNode node) { + @NonNull + public static Map getTags(@NonNull FlowNode node) { TagsAction tagAction = node.getAction(TagsAction.class); return (tagAction == null) ? Collections.emptyMap() : tagAction.getTags(); } @@ -110,7 +110,7 @@ public static Map getTags(@Nonnull FlowNode node) { * @return Tag value or null if not set */ @CheckForNull - public static String getTagValue(@Nonnull FlowNode node, @CheckForNull String tag) { + public static String getTagValue(@NonNull FlowNode node, @CheckForNull String tag) { if (tag == null || tag.isEmpty()) { return null; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/ThreadNameAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/ThreadNameAction.java index 4e9b703d..5b7747bf 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/ThreadNameAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/ThreadNameAction.java @@ -23,7 +23,7 @@ */ package org.jenkinsci.plugins.workflow.actions; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Thread name action. 
@@ -32,6 +32,6 @@ */ public interface ThreadNameAction extends PersistentAction { - @Nonnull + @NonNull String getThreadName(); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/WarningAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/WarningAction.java index 182f0b71..7eeaff1a 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/WarningAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/WarningAction.java @@ -1,8 +1,8 @@ package org.jenkinsci.plugins.workflow.actions; import hudson.model.Result; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import org.jenkinsci.plugins.workflow.graph.FlowNode; /** @@ -13,10 +13,10 @@ * Visualizations should treat FlowNodes with this action as if the FlowNode's result was {@link #result}. */ public class WarningAction implements PersistentAction { - private @Nonnull Result result; + private @NonNull Result result; private @CheckForNull String message; - public WarningAction(@Nonnull Result result) { + public WarningAction(@NonNull Result result) { this.result = result; } @@ -29,7 +29,7 @@ public WarningAction withMessage(String message) { return message; } - public @Nonnull Result getResult() { + public @NonNull Result getResult() { return result; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/actions/WorkspaceAction.java b/src/main/java/org/jenkinsci/plugins/workflow/actions/WorkspaceAction.java index e41ccbb0..86917457 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/actions/WorkspaceAction.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/actions/WorkspaceAction.java @@ -28,8 +28,8 @@ import hudson.model.Node; import hudson.model.labels.LabelAtom; import java.util.Set; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.model.Jenkins; import org.jenkinsci.plugins.workflow.FilePathUtils; @@ -39,18 +39,18 @@ public abstract class WorkspaceAction implements PersistentAction { /** The {@link Node#getNodeName} of the workspace. */ - public abstract @Nonnull String getNode(); + public abstract @NonNull String getNode(); /** The {@link FilePath#getRemote} of the workspace. */ - public abstract @Nonnull String getPath(); + public abstract @NonNull String getPath(); /** * The {@link Node#getAssignedLabels} of the node owning the workspace. * {@link Node#getSelfLabel} should be exempted, so this set may be empty in the typical case. * (Could be reconstructed in most cases via {@link Jenkins#getNode} on {@link #getNode}, - * but not for a slave which has since been removed, common with clouds.) + * but not for an agent which has since been removed, common with clouds.) */ - public abstract @Nonnull Set getLabels(); + public abstract @NonNull Set getLabels(); /** Reconstructs the live workspace, if possible. 
*/ public final @CheckForNull FilePath getWorkspace() { diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/DirectExecutor.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/DirectExecutor.java new file mode 100644 index 00000000..fdf1532a --- /dev/null +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/DirectExecutor.java @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2007 The Guava Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +package org.jenkinsci.plugins.workflow.flow; + +import com.google.common.annotations.GwtCompatible; +import org.kohsuke.accmod.Restricted; +import org.kohsuke.accmod.restrictions.NoExternalUse; +import java.util.concurrent.Executor; + +/** + * An {@link Executor} that runs each task in the thread that invokes {@link Executor#execute + * execute}. + */ +@GwtCompatible +@Restricted(NoExternalUse.class) +enum DirectExecutor implements Executor { + INSTANCE; + + @Override + public void execute(Runnable command) { + command.run(); + } + + @Override + public String toString() { + return "MoreExecutors.directExecutor()"; + } +} \ No newline at end of file diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/DurabilityHintProvider.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/DurabilityHintProvider.java index 4eb5a440..0494c919 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/DurabilityHintProvider.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/DurabilityHintProvider.java @@ -4,8 +4,8 @@ import hudson.ExtensionPoint; import hudson.model.Item; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Provides a way to indirectly register durability settings to apply to pipelines. @@ -16,9 +16,9 @@ public interface DurabilityHintProvider extends ExtensionPoint { int ordinal(); @CheckForNull - FlowDurabilityHint suggestFor(@Nonnull Item x); + FlowDurabilityHint suggestFor(@NonNull Item x); - static @Nonnull FlowDurabilityHint suggestedFor(@Nonnull Item x) { + static @NonNull FlowDurabilityHint suggestedFor(@NonNull Item x) { int ordinal = Integer.MAX_VALUE; FlowDurabilityHint hint = GlobalDefaultFlowDurabilityLevel.getDefaultDurabilityHint(); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/ErrorCondition.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/ErrorCondition.java new file mode 100644 index 00000000..e24f3e1b --- /dev/null +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/ErrorCondition.java @@ -0,0 +1,64 @@ +/* + * The MIT License + * + * Copyright 2022 CloudBees, Inc. 
+ * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ + +package org.jenkinsci.plugins.workflow.flow; + +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import hudson.ExtensionPoint; +import hudson.model.AbstractDescribableImpl; +import hudson.model.Descriptor; +import java.io.IOException; +import java.io.Serializable; +import org.jenkinsci.plugins.workflow.actions.ErrorAction; +import org.jenkinsci.plugins.workflow.steps.StepContext; +import org.kohsuke.accmod.Restricted; +import org.kohsuke.accmod.restrictions.Beta; + +/** + * User-configurable predicate for errors that may occur during a build. + * Implementations could check for agent-related outages, for example. + * Step callers could use a condition to decide whether to ignore or report an error, retry, etc. + */ +@Restricted(Beta.class) +public abstract class ErrorCondition extends AbstractDescribableImpl implements ExtensionPoint, Serializable { + + /** + * Checks whether a given error matches this condition. 
+ * @param error some exception thrown during a build + * @param context the context in which the error is being handled, if available; + * {@link ErrorAction#findOrigin} could be used to determine the part of the build in which the error was thrown + * @return true if the error is recognized + * @throws IOException as per {@link StepContext#get} + * @throws InterruptedException as per {@link StepContext#get} + */ + public abstract boolean test(@NonNull Throwable error, @CheckForNull StepContext context) throws IOException, InterruptedException; + + @Override public ErrorConditionDescriptor getDescriptor() { + return (ErrorConditionDescriptor) super.getDescriptor(); + } + + public static abstract class ErrorConditionDescriptor extends Descriptor {} + +} diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowCopier.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowCopier.java index 63717759..762ed61c 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowCopier.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowCopier.java @@ -24,7 +24,6 @@ package org.jenkinsci.plugins.workflow.flow; -import com.google.common.collect.ImmutableList; import hudson.Extension; import hudson.ExtensionPoint; import hudson.model.Action; @@ -33,6 +32,8 @@ import hudson.model.Run; import hudson.model.TaskListener; import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; import jenkins.scm.api.SCMRevisionAction; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; @@ -65,7 +66,7 @@ public static abstract class ByRun extends FlowCopier { Queue.Executable originalExec = original.getExecutable(); Queue.Executable copyExec = copy.getExecutable(); if (originalExec instanceof Run && copyExec instanceof Run) { - copy((Run) originalExec, (Run) copyExec, copy.getListener()); + copy((Run) originalExec, (Run) copyExec, copy.getListener()); } } @@ -75,10 +76,10 @@ public static abstract class ByRun extends FlowCopier { @Extension public static class StandardActions extends FlowCopier.ByRun { // TODO cloned from ReplayAction; consider whether it is appropriate to share these (related but not identical case) - private static final Iterable> COPIED_ACTIONS = ImmutableList.of( + private static final Iterable> COPIED_ACTIONS = Collections.unmodifiableList(Arrays.asList( ParametersAction.class, SCMRevisionAction.class - ); + )); @Override public void copy(Run original, Run copy, TaskListener listener) throws IOException, InterruptedException { for (Class type : COPIED_ACTIONS) { diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDefinition.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDefinition.java index c9444e0e..4ece9202 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDefinition.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDefinition.java @@ -30,8 +30,11 @@ import hudson.model.Action; import hudson.model.TaskListener; import hudson.util.LogTaskListener; +import hudson.scm.SCM; import java.io.IOException; +import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; @@ -76,4 +79,17 @@ public FlowExecution create(FlowExecutionOwner handle, List ac return (FlowDefinitionDescriptor) super.getDescriptor(); } + /** + * Returns a list of all {@link SCM SCMs} that are part of the static configuration of the {@link FlowDefinition}. 
+ * Subclasses of {@link FlowDefinition} may override this method to return statically configured SCMs + * that they may be aware of. For example, {@code CpsScmFlowDefinition} returns SCM used to retrieve + * Jenkinsfile. + * Does not include any SCMs used dynamically during Pipeline execution. + * May be empty (or not overridden) if the Pipeline does not include any statically configured SCMs. + * This method is used in {@code WorkflowJob} class which will combine lists of static and dynamic SCMs. + */ + public Collection getSCMs() { + return Collections.emptyList(); + } + } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDurabilityHint.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDurabilityHint.java index 9abca387..c1c47f9e 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDurabilityHint.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowDurabilityHint.java @@ -23,10 +23,10 @@ */ package org.jenkinsci.plugins.workflow.flow; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; /** - * Provides hints about just how hard we should try to protect our workflow from failures of the master. + * Provides hints about just how hard we should try to protect our workflow from failures of the controller. * There is a trade-off between durability and performance, with higher levels carrying much higher overheads to execution. * Storage and persistence of data should try to provide at least the specified level (may offer more). * @@ -47,7 +47,7 @@ public enum FlowDurabilityHint { private final String tooltip; - FlowDurabilityHint (boolean useAtomicWrite, boolean persistWithEveryStep, @Nonnull String description, String tooltip) { + FlowDurabilityHint (boolean useAtomicWrite, boolean persistWithEveryStep, @NonNull String description, String tooltip) { this.atomicWrite = useAtomicWrite; this.persistWithEveryStep = persistWithEveryStep; this.description = description; diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecution.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecution.java index 0b105686..27273754 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecution.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecution.java @@ -43,8 +43,8 @@ import java.io.IOException; import java.util.List; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.model.Jenkins; import jenkins.model.queue.AsynchronousExecution; import org.acegisecurity.Authentication; @@ -91,7 +91,7 @@ protected synchronized GraphLookupView getInternalGraphLookup() { * Get the durability level we're aiming for, or a default value if none is set (defaults may change as implementation evolve). * @return Durability level we are aiming for with this execution. */ - @Nonnull + @NonNull public FlowDurabilityHint getDurabilityHint() { // MAX_SURVIVABILITY is the behavior of builds before the change was introduced. return (durabilityHint != null) ? durabilityHint : FlowDurabilityHint.MAX_SURVIVABILITY; @@ -250,7 +250,7 @@ public boolean blocksRestart() { * A flow run triggered by a user manually might be associated with the runtime, or it might not. 
* @return an authentication; {@link ACL#SYSTEM} as a fallback, or {@link Jenkins#ANONYMOUS} if the flow is supposed to be limited to a specific user but that user cannot now be looked up */ - public abstract @Nonnull Authentication getAuthentication(); + public abstract @NonNull Authentication getAuthentication(); /** @see GraphLookupView#isActive(FlowNode) @@ -258,7 +258,7 @@ public boolean blocksRestart() { */ @Override @Restricted(NoExternalUse.class) // Only public because graph, flow, and graphanalysis are separate packages - public boolean isActive(@Nonnull FlowNode node) { + public boolean isActive(@NonNull FlowNode node) { if (!this.equals(node.getExecution())) { throw new IllegalArgumentException("Can't look up info for a FlowNode that doesn't belong to this execution!"); } @@ -271,7 +271,7 @@ public boolean isActive(@Nonnull FlowNode node) { @CheckForNull @Override @Restricted(NoExternalUse.class) // Only public because graph, flow, and graphanalysis are separate packages - public BlockEndNode getEndNode(@Nonnull BlockStartNode startNode) { + public BlockEndNode getEndNode(@NonNull BlockStartNode startNode) { if (!this.equals(startNode.getExecution())) { throw new IllegalArgumentException("Can't look up info for a FlowNode that doesn't belong to this execution!"); } @@ -284,7 +284,7 @@ public BlockEndNode getEndNode(@Nonnull BlockStartNode startNode) { @CheckForNull @Override @Restricted(NoExternalUse.class) // Only public because graph, flow, and graphanalysis are separate packages - public BlockStartNode findEnclosingBlockStart(@Nonnull FlowNode node) { + public BlockStartNode findEnclosingBlockStart(@NonNull FlowNode node) { if (!this.equals(node.getExecution())) { throw new IllegalArgumentException("Can't look up info for a FlowNode that doesn't belong to this execution!"); } @@ -294,10 +294,10 @@ public BlockStartNode findEnclosingBlockStart(@Nonnull FlowNode node) { /** @see GraphLookupView#findAllEnclosingBlockStarts(FlowNode) * @throws IllegalArgumentException If the input {@link FlowNode} does not belong to this execution */ - @Nonnull + @NonNull @Override @Restricted(NoExternalUse.class) // Only public because graph, flow, and graphanalysis are separate packages - public List findAllEnclosingBlockStarts(@Nonnull FlowNode node) { + public List findAllEnclosingBlockStarts(@NonNull FlowNode node) { if (!this.equals(node.getExecution())) { throw new IllegalArgumentException("Can't look up info for a FlowNode that doesn't belong to this execution!"); } @@ -307,10 +307,10 @@ public List findAllEnclosingBlockStarts(@Nonnull FlowNode node) /** @see GraphLookupView#iterateEnclosingBlocks(FlowNode) * @throws IllegalArgumentException If the input {@link FlowNode} does not belong to this execution */ - @Nonnull + @NonNull @Override @Restricted(NoExternalUse.class) - public Iterable iterateEnclosingBlocks(@Nonnull FlowNode node) { + public Iterable iterateEnclosingBlocks(@NonNull FlowNode node) { if (!this.equals(node.getExecution())) { throw new IllegalArgumentException("Can't look up info for a FlowNode that doesn't belong to this execution!"); } @@ -322,4 +322,12 @@ public Iterable iterateEnclosingBlocks(@Nonnull FlowNode node) { protected void notifyShutdown() { // Default is no-op } + + /** + * Called after a restart and any attempts at {@link StepExecution#onResume} have completed. + * This is a signal that it is safe to resume program execution. + * By default, does nothing. 
+ */ + protected void afterStepExecutionsResumed() {} + } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionList.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionList.java index e33661d1..220a2272 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionList.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionList.java @@ -5,11 +5,14 @@ import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import com.google.inject.Inject; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.ExtensionList; import hudson.XmlFile; +import hudson.init.InitMilestone; import hudson.init.Terminator; +import hudson.model.Computer; import hudson.model.listeners.ItemListener; import hudson.remoting.SingleLaneExecutorService; import hudson.util.CopyOnWriteList; @@ -21,17 +24,23 @@ import java.io.File; import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CancellationException; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; +import org.jenkinsci.plugins.workflow.graph.FlowNode; +import org.jenkinsci.plugins.workflow.graphanalysis.LinearBlockHoppingScanner; -import static java.util.logging.Level.*; -import javax.annotation.CheckForNull; import org.kohsuke.accmod.Restricted; +import org.kohsuke.accmod.restrictions.Beta; import org.kohsuke.accmod.restrictions.DoNotUse; /** @@ -45,6 +54,8 @@ public class FlowExecutionList implements Iterable { private final SingleLaneExecutorService executor = new SingleLaneExecutorService(Timer.get()); private XmlFile configFile; + private transient volatile boolean resumptionComplete; + public FlowExecutionList() { load(); } @@ -69,7 +80,7 @@ protected FlowExecution computeNext() { return e; } } catch (Throwable e) { - LOGGER.log(WARNING, "Failed to load " + o + ". Unregistering", e); + LOGGER.log(Level.WARNING, "Failed to load " + o + ". 
Unregistering", e); unregister(o); } } @@ -97,9 +108,9 @@ private synchronized void load() { if (cf.exists()) { try { runningTasks.replaceBy((List) cf.read()); - LOGGER.log(FINE, "loaded: {0}", runningTasks); + LOGGER.log(Level.FINE, "loaded: {0}", runningTasks); } catch (Exception x) { - LOGGER.log(WARNING, "ignoring broken " + cf, x); + LOGGER.log(Level.WARNING, "ignoring broken " + cf, x); } } } @@ -111,7 +122,7 @@ private synchronized void load() { */ public synchronized void register(final FlowExecutionOwner self) { if (runningTasks.contains(self)) { - LOGGER.log(WARNING, "{0} was already in the list: {1}", new Object[] {self, runningTasks.getView()}); + LOGGER.log(Level.WARNING, "{0} was already in the list: {1}", new Object[] {self, runningTasks.getView()}); } else { runningTasks.add(self); saveLater(); @@ -120,37 +131,33 @@ public synchronized void register(final FlowExecutionOwner self) { public synchronized void unregister(final FlowExecutionOwner self) { if (runningTasks.remove(self)) { - LOGGER.log(FINE, "unregistered {0}", new Object[] {self}); + LOGGER.log(Level.FINE, "unregistered {0}", new Object[] {self}); saveLater(); } else { - LOGGER.log(WARNING, "{0} was not in the list to begin with: {1}", new Object[] {self, runningTasks.getView()}); + LOGGER.log(Level.WARNING, "{0} was not in the list to begin with: {1}", new Object[] {self, runningTasks.getView()}); } } private synchronized void saveLater() { final List copy = new ArrayList<>(runningTasks.getView()); - LOGGER.log(FINE, "scheduling save of {0}", copy); + LOGGER.log(Level.FINE, "scheduling save of {0}", copy); try { - executor.submit(new Runnable() { - @Override public void run() { - save(copy); - } - }); + executor.submit(() -> save(copy)); } catch (RejectedExecutionException x) { - LOGGER.log(FINE, "could not schedule save, perhaps because Jenkins is shutting down; saving immediately", x); + LOGGER.log(Level.FINE, "could not schedule save, perhaps because Jenkins is shutting down; saving immediately", x); save(copy); } } private void save(List copy) { XmlFile cf = configFile(); - LOGGER.log(FINE, "saving {0} to {1}", new Object[] {copy, cf}); + LOGGER.log(Level.FINE, "saving {0} to {1}", new Object[] {copy, cf}); if (cf == null) { return; // oh well } try { cf.write(copy); } catch (IOException x) { - LOGGER.log(WARNING, null, x); + LOGGER.log(Level.WARNING, null, x); } } @@ -164,38 +171,34 @@ public static FlowExecutionList get() { return l; } + /** + * Returns true if all executions that were present in this {@link FlowExecutionList} have been loaded. + * + *
<p>
This takes place slightly after {@link InitMilestone#COMPLETED} is reached during Jenkins startup. + * + *
<p>
Useful to avoid resuming Pipelines in contexts that may lead to deadlock. + * + *
<p>
It is not guaranteed that {@link FlowExecution#afterStepExecutionsResumed} has been called at this point. + */ + @Restricted(Beta.class) + public boolean isResumptionComplete() { + return resumptionComplete; + } + /** * When Jenkins starts up and everything is loaded, be sure to proactively resurrect * all the ongoing {@link FlowExecution}s so that they start running again. */ @Extension public static class ItemListenerImpl extends ItemListener { - @Inject - FlowExecutionList list; - @Override public void onLoaded() { + FlowExecutionList list = FlowExecutionList.get(); for (final FlowExecution e : list) { - LOGGER.log(FINE, "Eager loading {0}", e); - Futures.addCallback(e.getCurrentExecutions(false), new FutureCallback>() { - @Override - public void onSuccess(List result) { - LOGGER.log(FINE, "Will resume {0}", result); - for (StepExecution se : result) { - se.onResume(); - } - } - - @Override - public void onFailure(Throwable t) { - if (t instanceof CancellationException) { - LOGGER.log(Level.FINE, "Cancelled load of " + e, t); - } else { - LOGGER.log(WARNING, "Failed to load " + e, t); - } - } - }); + // The call to FlowExecutionOwner.get in the implementation of iterator() is sufficent to load the Pipeline. + LOGGER.log(Level.FINE, "Eagerly loaded {0}", e); } + list.resumptionComplete = true; } } @@ -204,19 +207,16 @@ public void onFailure(Throwable t) { */ @Extension public static class StepExecutionIteratorImpl extends StepExecutionIterator { - @Inject - FlowExecutionList list; - @Override public ListenableFuture apply(final Function f) { List> all = new ArrayList<>(); - for (FlowExecution e : list) { + for (FlowExecution e : FlowExecutionList.get()) { ListenableFuture> execs = e.getCurrentExecutions(false); all.add(execs); Futures.addCallback(execs,new FutureCallback>() { @Override - public void onSuccess(List result) { + public void onSuccess(@NonNull List result) { for (StepExecution e : result) { try { f.apply(e); @@ -227,10 +227,10 @@ public void onSuccess(List result) { } @Override - public void onFailure(Throwable t) { + public void onFailure(@NonNull Throwable t) { LOGGER.log(Level.WARNING, null, t); } - }); + }, MoreExecutors.directExecutor()); } return Futures.allAsList(all); @@ -254,4 +254,140 @@ public void onFailure(Throwable t) { executor.awaitTermination(1, TimeUnit.MINUTES); } + /** + * Whenever a Pipeline resumes, resume all incomplete steps in its {@link FlowExecution}. + * + *
<p>
Called by {@code WorkflowRun.onLoad}, so guaranteed to run if a Pipeline resumes + * regardless of its presence in {@link FlowExecutionList}. + */ + @Extension + public static class ResumeStepExecutionListener extends FlowExecutionListener { + @Override + public void onResumed(@NonNull FlowExecution e) { + Futures.addCallback(e.getCurrentExecutions(false), new FutureCallback>() { + @Override + public void onSuccess(@NonNull List result) { + if (e.isComplete()) { + // WorkflowRun.onLoad will not fireResumed if the execution was already complete when loaded, + // and CpsFlowExecution should not then complete until afterStepExecutionsResumed, so this is defensive. + return; + } + FlowExecutionList list = FlowExecutionList.get(); + FlowExecutionOwner owner = e.getOwner(); + if (!list.runningTasks.contains(owner)) { + LOGGER.log(Level.WARNING, "Resuming {0}, which is missing from FlowExecutionList ({1}), so registering it now.", new Object[] {owner, list.runningTasks.getView()}); + list.register(owner); + } + LOGGER.log(Level.FINE, "Will resume {0}", result); + new ParallelResumer(result, e::afterStepExecutionsResumed).run(); + } + + @Override + public void onFailure(@NonNull Throwable t) { + if (t instanceof CancellationException) { + LOGGER.log(Level.FINE, "Cancelled load of " + e, t); + } else { + LOGGER.log(Level.WARNING, "Failed to load " + e, t); + } + e.afterStepExecutionsResumed(); + } + + }, MoreExecutors.directExecutor()); + } + } + + /** Calls {@link StepExecution#onResume} for each step in a running build. + * Does so in parallel, but always completing enclosing blocks before the enclosed step. + * A simplified version of https://stackoverflow.com/a/67449067/12916, since this should be a tree not a general DAG. + */ + private static final class ParallelResumer { + + private final Runnable onCompletion; + /** Step nodes mapped to the step execution. Entries removed when they are ready to be resumed. */ + private final Map nodes = new HashMap<>(); + /** Step nodes currently being resumed. Removed after resumption completes. */ + private final Set processing = new HashSet<>(); + /** Step nodes mapped to the nearest enclosing step node (no entry if at root). 
*/ + private final Map enclosing = new HashMap<>(); + + ParallelResumer(Collection executions, Runnable onCompletion) { + this.onCompletion = onCompletion; + // First look up positions in the flow graph, so that we can compute dependencies: + for (StepExecution se : executions) { + try { + FlowNode n = se.getContext().get(FlowNode.class); + if (n != null) { + nodes.put(n, se); + } else { + LOGGER.warning(() -> "Could not find FlowNode for " + se + " so it will not be resumed"); + } + } catch (IOException | InterruptedException x) { + LOGGER.log(Level.WARNING, "Could not look up FlowNode for " + se + " so it will not be resumed", x); + } + } + for (FlowNode n : nodes.keySet()) { + LinearBlockHoppingScanner scanner = new LinearBlockHoppingScanner(); + scanner.setup(n); + for (FlowNode parent : scanner) { + if (parent != n && nodes.containsKey(parent)) { + enclosing.put(n, parent); + break; + } + } + } + } + + synchronized void run() { + if (Jenkins.get().isTerminating()) { + LOGGER.fine("Skipping step resumption during shutdown"); + return; + } + if (Jenkins.get().getInitLevel() != InitMilestone.COMPLETED || Jenkins.get().isQuietingDown()) { + LOGGER.fine("Waiting to resume step until Jenkins completes startup and is not in quiet mode"); + Timer.get().schedule(this::run, 100, TimeUnit.MILLISECONDS); + return; + } + LOGGER.fine(() -> "Checking status with nodes=" + nodes + " enclosing=" + enclosing + " processing=" + processing); + if (nodes.isEmpty()) { + if (processing.isEmpty()) { + LOGGER.fine("Done"); + onCompletion.run(); + } + return; + } + Map ready = new HashMap<>(); + for (Map.Entry entry : nodes.entrySet()) { + FlowNode n = entry.getKey(); + FlowNode parent = enclosing.get(n); + if (parent == null || !nodes.containsKey(parent)) { + ready.put(n, entry.getValue()); + } + } + LOGGER.fine(() -> "Ready to resume: " + ready); + nodes.keySet().removeAll(ready.keySet()); + for (Map.Entry entry : ready.entrySet()) { + FlowNode n = entry.getKey(); + StepExecution exec = entry.getValue(); + processing.add(n); + // Strictly speaking threadPoolForRemoting should be used for agent communications. + // In practice the only onResume impl known to block is in ExecutorStepExecution. + // Avoid jenkins.util.Timer since it is capped at 10 threads and should not be used for long tasks. + Computer.threadPoolForRemoting.submit(() -> { + LOGGER.fine(() -> "About to resume " + n + " ~ " + exec); + try { + exec.onResume(); + } catch (Throwable x) { + exec.getContext().onFailure(x); + } + LOGGER.fine(() -> "Finished resuming " + n + " ~ " + exec); + synchronized (ParallelResumer.this) { + processing.remove(n); + run(); + } + }); + } + } + + } + } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListener.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListener.java index 35afcae0..bfdc5035 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListener.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListener.java @@ -5,7 +5,7 @@ import org.jenkinsci.plugins.workflow.graph.FlowEndNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Listens for significant status updates for a {@link FlowExecution}, such as started running or completed. @@ -24,7 +24,7 @@ public abstract class FlowExecutionListener implements ExtensionPoint { * * @param execution The {@link FlowExecution} that has been created. 
*/ - public void onCreated(@Nonnull FlowExecution execution) { + public void onCreated(@NonNull FlowExecution execution) { } /** @@ -34,7 +34,7 @@ public void onCreated(@Nonnull FlowExecution execution) { * * @param execution The {@link FlowExecution} that has started running. */ - public void onRunning(@Nonnull FlowExecution execution) { + public void onRunning(@NonNull FlowExecution execution) { } /** @@ -42,7 +42,7 @@ public void onRunning(@Nonnull FlowExecution execution) { * * @param execution The {@link FlowExecution} that has resumed. */ - public void onResumed(@Nonnull FlowExecution execution) { + public void onResumed(@NonNull FlowExecution execution) { } /** @@ -55,13 +55,14 @@ public void onResumed(@Nonnull FlowExecution execution) { * * @param execution The {@link FlowExecution} that has completed. */ - public void onCompleted(@Nonnull FlowExecution execution) { + public void onCompleted(@NonNull FlowExecution execution) { } /** * Fires the {@link #onCreated(FlowExecution)} event. */ - public static void fireCreated(@Nonnull FlowExecution execution) { + public static void fireCreated(@NonNull FlowExecution execution) { + // TODO Jenkins 2.325+ use Listeners.notify for (FlowExecutionListener listener : ExtensionList.lookup(FlowExecutionListener.class)) { listener.onCreated(execution); } @@ -70,7 +71,7 @@ public static void fireCreated(@Nonnull FlowExecution execution) { /** * Fires the {@link #onRunning(FlowExecution)} event. */ - public static void fireRunning(@Nonnull FlowExecution execution) { + public static void fireRunning(@NonNull FlowExecution execution) { for (FlowExecutionListener listener : ExtensionList.lookup(FlowExecutionListener.class)) { listener.onRunning(execution); } @@ -79,7 +80,7 @@ public static void fireRunning(@Nonnull FlowExecution execution) { /** * Fires the {@link #onResumed(FlowExecution)} event. */ - public static void fireResumed(@Nonnull FlowExecution execution) { + public static void fireResumed(@NonNull FlowExecution execution) { for (FlowExecutionListener listener : ExtensionList.lookup(FlowExecutionListener.class)) { listener.onResumed(execution); } @@ -88,7 +89,7 @@ public static void fireResumed(@Nonnull FlowExecution execution) { /** * Fires the {@link #onCompleted(FlowExecution)} event. */ - public static void fireCompleted(@Nonnull FlowExecution execution) { + public static void fireCompleted(@NonNull FlowExecution execution) { for (FlowExecutionListener listener : ExtensionList.lookup(FlowExecutionListener.class)) { listener.onCompleted(execution); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionOwner.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionOwner.java index 2a09113e..f8a0e322 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionOwner.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionOwner.java @@ -32,8 +32,8 @@ import java.io.Serializable; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.model.TransientActionFactory; import org.jenkinsci.plugins.workflow.log.LogStorage; import org.jenkinsci.plugins.workflow.steps.StepContext; @@ -50,7 +50,7 @@ public abstract class FlowExecutionOwner implements Serializable { * @throws IOException * if fails to find {@link FlowExecution}. 
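A hedged sketch of how these owner accessors are commonly consumed; the helper name and message text are illustrative, and the error handling follows the contract documented here, where get() may fail with an IOException:

    import hudson.model.TaskListener;
    import java.io.IOException;
    import org.jenkinsci.plugins.workflow.flow.FlowExecution;
    import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;

    class OwnerUsageSketch {
        /** Illustrative only: prints the number of current flow heads to the build log. */
        static void logHeadCount(FlowExecutionOwner owner) throws IOException {
            TaskListener listener = owner.getListener();
            try {
                FlowExecution execution = owner.get(); // may throw IOException if the execution cannot be loaded
                listener.getLogger().println("Current heads: " + execution.getCurrentHeads().size());
            } catch (IOException x) {
                listener.getLogger().println("Could not load execution: " + x);
            }
        }
    }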
*/ - @Nonnull + @NonNull public abstract FlowExecution get() throws IOException; /** Invoked in {@link FlowExecutionList#saveAll()} to notify that execution has been suspended */ @@ -75,7 +75,7 @@ void notifyShutdown() { } /** - * A directory (on the master) where information may be persisted. + * A directory (on the controller) where information may be persisted. * @see Run#getRootDir */ public abstract File getRootDir() throws IOException; @@ -125,7 +125,7 @@ public String getUrlOfExecution() throws IOException { *

The listener should be remotable: if sent to an agent, messages printed to it should still appear in the log. * The same will then apply to calls to {@link StepContext#get} on {@link TaskListener}. */ - public @Nonnull TaskListener getListener() throws IOException { + public @NonNull TaskListener getListener() throws IOException { try { return LogStorage.of(this).overallListener(); } catch (InterruptedException x) { @@ -156,6 +156,7 @@ public static FlowExecutionOwner dummyOwner() { private static class DummyOwner extends FlowExecutionOwner { DummyOwner() {} + @NonNull @Override public FlowExecution get() throws IOException { throw new IOException("not implemented"); } @@ -174,7 +175,8 @@ private static class DummyOwner extends FlowExecutionOwner { @Override public int hashCode() { return 0; } - @Override public TaskListener getListener() throws IOException { + @NonNull + @Override public TaskListener getListener() { return TaskListener.NULL; } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel.java index 3c00074a..d07c751f 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel.java @@ -4,14 +4,18 @@ import hudson.model.AbstractDescribableImpl; import hudson.model.Descriptor; import hudson.security.Permission; -import hudson.util.ReflectionUtils; +import hudson.util.FormValidation; +import hudson.util.ListBoxModel; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; import jenkins.model.Jenkins; import net.sf.json.JSONObject; +import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.StaplerRequest; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; -import java.lang.reflect.InvocationTargetException; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Supports a global default durability level for users @@ -32,6 +36,7 @@ public DescriptorImpl() { /** Null to use the platform default, which may change over time as enhanced options are available. */ private FlowDurabilityHint durabilityHint = null; + @NonNull @Override public String getDisplayName() { return "Global Default Pipeline Durability Level"; @@ -47,8 +52,17 @@ public void setDurabilityHint(FlowDurabilityHint hint){ save(); } + public FormValidation doCheckDurabilityHint(@QueryParameter("durabilityHint") String durabilityHint) { + FlowDurabilityHint flowDurabilityHint = Arrays.stream(FlowDurabilityHint.values()) + .filter(f -> f.name().equals(durabilityHint)) + .findFirst() + .orElse(GlobalDefaultFlowDurabilityLevel.SUGGESTED_DURABILITY_HINT); + + return FormValidation.ok(flowDurabilityHint.getTooltip()); + } + @Override - public boolean configure(StaplerRequest req, JSONObject json) throws FormException { + public boolean configure(StaplerRequest req, JSONObject json) { // TODO verify if this is covered by permissions checks or we need an explicit check here. 
Object ob = json.opt("durabilityHint"); FlowDurabilityHint hint = null; @@ -65,29 +79,24 @@ public boolean configure(StaplerRequest req, JSONObject json) throws FormExcepti return true; } - public static FlowDurabilityHint getSuggestedDurabilityHint() { - return GlobalDefaultFlowDurabilityLevel.SUGGESTED_DURABILITY_HINT; - } + public ListBoxModel doFillDurabilityHintItems() { + ListBoxModel options = new ListBoxModel(); - public static FlowDurabilityHint[] getDurabilityHintValues() { - return FlowDurabilityHint.values(); - } + options.add("None: use pipeline default (" + GlobalDefaultFlowDurabilityLevel.SUGGESTED_DURABILITY_HINT.name() + ")", "null"); - @Nonnull - // TODO: Add @Override when Jenkins core baseline is 2.222+ - public Permission getRequiredGlobalConfigPagePermission() { - return getJenkinsManageOrAdmin(); + List mappedOptions = Arrays.stream(FlowDurabilityHint.values()) + .map(hint -> new ListBoxModel.Option(hint.getDescription(), hint.name())) + .collect(Collectors.toList()); + + options.addAll(mappedOptions); + + return options; } - // TODO: remove when Jenkins core baseline is 2.222+ - Permission getJenkinsManageOrAdmin() { - Permission manage; - try { // Manage is available starting from Jenkins 2.222 (https://jenkins.io/changelog/#v2.222). See JEP-223 for more info - manage = (Permission) ReflectionUtils.getPublicProperty(Jenkins.get(), "MANAGE"); - } catch (IllegalArgumentException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { - manage = Jenkins.ADMINISTER; - } - return manage; + @NonNull + @Override + public Permission getRequiredGlobalConfigPagePermission() { + return Jenkins.MANAGE; } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/MoreExecutors.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/MoreExecutors.java new file mode 100644 index 00000000..1d95f6f0 --- /dev/null +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/MoreExecutors.java @@ -0,0 +1,86 @@ +/* + * Copyright (C) 2007 The Guava Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +package org.jenkinsci.plugins.workflow.flow; + +import com.google.common.annotations.GwtCompatible; +import org.kohsuke.accmod.Restricted; +import org.kohsuke.accmod.restrictions.NoExternalUse; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; + +/** + * Factory and utility methods for {@link java.util.concurrent.Executor}, {@link ExecutorService}, + * and {@link java.util.concurrent.ThreadFactory}. + * + * @author Eric Fellheimer + * @author Kyle Littlefield + * @author Justin Mahoney + * @since 3.0 + */ +@GwtCompatible(emulated = true) +@Restricted(NoExternalUse.class) +public final class MoreExecutors { + private MoreExecutors() {} + + /** + * Returns an {@link Executor} that runs each task in the thread that invokes {@link + * Executor#execute execute}, as in {@code ThreadPoolExecutor.CallerRunsPolicy}. + * + *

This executor is appropriate for tasks that are lightweight and not deeply chained. + * Inappropriate {@code directExecutor} usage can cause problems, and these problems can be + * difficult to reproduce because they depend on timing. For example: + * + *

    + *
  • A call like {@code future.transform(function, directExecutor())} may execute the function + * immediately in the thread that is calling {@code transform}. (This specific case happens + * if the future is already completed.) If {@code transform} call was made from a UI thread + * or other latency-sensitive thread, a heavyweight function can harm responsiveness. + *
  • If the task will be executed later, consider which thread will trigger the execution -- + * since that thread will execute the task inline. If the thread is a shared system thread + * like an RPC network thread, a heavyweight task can stall progress of the whole system or + * even deadlock it. + *
  • If many tasks will be triggered by the same event, one heavyweight task may delay other + * tasks -- even tasks that are not themselves {@code directExecutor} tasks. + *
  • If many such tasks are chained together (such as with {@code + * future.transform(...).transform(...).transform(...)....}), they may overflow the stack. + * (In simple cases, callers can avoid this by registering all tasks with the same {@code + * MoreExecutors#newSequentialExecutor} wrapper around {@code directExecutor()}. More + * complex cases may require using thread pools or making deeper changes.) + *
+ * + * Additionally, beware of executing tasks with {@code directExecutor} while holding a lock. Since + * the task you submit to the executor (or any other arbitrary work the executor does) may do slow + * work or acquire other locks, you risk deadlocks. + * + *

This instance is equivalent to: + * + *

{@code
+   * final class DirectExecutor implements Executor {
+   *   public void execute(Runnable r) {
+   *     r.run();
+   *   }
+   * }
+   * }
+ * + *

This should be preferred to {@code #newDirectExecutorService()} because implementing the + * {@link ExecutorService} subinterface necessitates significant performance overhead. + * + * + * @since 18.0 + */ + public static Executor directExecutor() { + return DirectExecutor.INSTANCE; + } +} \ No newline at end of file diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/StashManager.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/StashManager.java index bdbc0d01..996175df 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/StashManager.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/StashManager.java @@ -50,8 +50,9 @@ import java.util.HashMap; import java.util.Map; import java.util.TreeMap; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import java.nio.file.Files; import jenkins.model.ArtifactManager; import jenkins.model.Jenkins; import jenkins.util.BuildListenerAdapter; @@ -77,19 +78,19 @@ public class StashManager { @Deprecated - public static void stash(@Nonnull Run build, @Nonnull String name, @Nonnull FilePath workspace, @Nonnull TaskListener listener, + public static void stash(@NonNull Run build, @NonNull String name, @NonNull FilePath workspace, @NonNull TaskListener listener, @CheckForNull String includes, @CheckForNull String excludes) throws IOException, InterruptedException { stash(build, name, workspace, listener, includes, excludes, true, false); } @Deprecated - public static void stash(@Nonnull Run build, @Nonnull String name, @Nonnull FilePath workspace, @Nonnull TaskListener listener, + public static void stash(@NonNull Run build, @NonNull String name, @NonNull FilePath workspace, @NonNull TaskListener listener, @CheckForNull String includes, @CheckForNull String excludes, boolean useDefaultExcludes) throws IOException, InterruptedException { stash(build, name, workspace, listener, includes, excludes, useDefaultExcludes, false); } @Deprecated - public static void stash(@Nonnull Run build, @Nonnull String name, @Nonnull FilePath workspace, @Nonnull TaskListener listener, + public static void stash(@NonNull Run build, @NonNull String name, @NonNull FilePath workspace, @NonNull TaskListener listener, @CheckForNull String includes, @CheckForNull String excludes, boolean useDefaultExcludes, boolean allowEmpty) throws IOException, InterruptedException { stash(build, name, workspace, launcherFor(workspace, listener), envFor(build, workspace, listener), listener, includes, excludes, useDefaultExcludes, allowEmpty); } @@ -109,7 +110,7 @@ public static void stash(@Nonnull Run build, @Nonnull String name, @Nonnull * @see StashAwareArtifactManager#stash */ @SuppressFBWarnings(value="RV_RETURN_VALUE_IGNORED_BAD_PRACTICE", justification="fine if mkdirs returns false") - public static void stash(@Nonnull Run build, @Nonnull String name, @Nonnull FilePath workspace, @Nonnull Launcher launcher, @Nonnull EnvVars env, @Nonnull TaskListener listener, + public static void stash(@NonNull Run build, @NonNull String name, @NonNull FilePath workspace, @NonNull Launcher launcher, @NonNull EnvVars env, @NonNull TaskListener listener, @CheckForNull String includes, @CheckForNull String excludes, boolean useDefaultExcludes, boolean allowEmpty) throws IOException, InterruptedException { Jenkins.checkGoodName(name); StashAwareArtifactManager saam = stashAwareArtifactManager(build); @@ -132,7 +133,7 @@ public static void 
stash(@Nonnull Run build, @Nonnull String name, @Nonnull } @Deprecated - public static void unstash(@Nonnull Run build, @Nonnull String name, @Nonnull FilePath workspace, @Nonnull TaskListener listener) throws IOException, InterruptedException { + public static void unstash(@NonNull Run build, @NonNull String name, @NonNull FilePath workspace, @NonNull TaskListener listener) throws IOException, InterruptedException { unstash(build, name, workspace, launcherFor(workspace, listener), envFor(build, workspace, listener), listener); } @@ -147,7 +148,7 @@ public static void unstash(@Nonnull Run build, @Nonnull String name, @Nonnu * @throws AbortException in case there is no such saved stash * @see StashAwareArtifactManager#unstash */ - public static void unstash(@Nonnull Run build, @Nonnull String name, @Nonnull FilePath workspace, @Nonnull Launcher launcher, @Nonnull EnvVars env, @Nonnull TaskListener listener) throws IOException, InterruptedException { + public static void unstash(@NonNull Run build, @NonNull String name, @NonNull FilePath workspace, @NonNull Launcher launcher, @NonNull EnvVars env, @NonNull TaskListener listener) throws IOException, InterruptedException { Jenkins.checkGoodName(name); StashAwareArtifactManager saam = stashAwareArtifactManager(build); if (saam != null) { @@ -162,7 +163,7 @@ public static void unstash(@Nonnull Run build, @Nonnull String name, @Nonnu } @Deprecated - public static void clearAll(@Nonnull Run build) throws IOException { + public static void clearAll(@NonNull Run build) throws IOException { try { clearAll(build, TaskListener.NULL); } catch (InterruptedException x) { @@ -176,7 +177,7 @@ public static void clearAll(@Nonnull Run build) throws IOException { * @param listener a way to report progress or problems * @see StashAwareArtifactManager#clearAllStashes */ - public static void clearAll(@Nonnull Run build, @Nonnull TaskListener listener) throws IOException, InterruptedException { + public static void clearAll(@NonNull Run build, @NonNull TaskListener listener) throws IOException, InterruptedException { StashAwareArtifactManager saam = stashAwareArtifactManager(build); if (saam != null) { saam.clearAllStashes(listener); @@ -186,7 +187,7 @@ public static void clearAll(@Nonnull Run build, @Nonnull TaskListener liste } @Deprecated - public static void maybeClearAll(@Nonnull Run build) throws IOException { + public static void maybeClearAll(@NonNull Run build) throws IOException { try { maybeClearAll(build, TaskListener.NULL); } catch (InterruptedException x) { @@ -200,7 +201,7 @@ public static void maybeClearAll(@Nonnull Run build) throws IOException { * @param build a build possibly passed to {@link #stash} in the past * @see #clearAll(Run, TaskListener) */ - public static void maybeClearAll(@Nonnull Run build, @Nonnull TaskListener listener) throws IOException, InterruptedException { + public static void maybeClearAll(@NonNull Run build, @NonNull TaskListener listener) throws IOException, InterruptedException { for (StashBehavior behavior : ExtensionList.lookup(StashBehavior.class)) { if (!behavior.shouldClearAll(build)) { return; @@ -213,7 +214,7 @@ public static void maybeClearAll(@Nonnull Run build, @Nonnull TaskListener * @deprecated without replacement; only used from {@link CopyStashesAndArtifacts} anyway */ @Deprecated - public static void copyAll(@Nonnull Run from, @Nonnull Run to) throws IOException { + public static void copyAll(@NonNull Run from, @NonNull Run to) throws IOException { File fromStorage = storage(from); if 
(!fromStorage.isDirectory()) { return; @@ -223,7 +224,7 @@ public static void copyAll(@Nonnull Run from, @Nonnull Run to) throws @Restricted(DoNotUse.class) // just for tests, and incompatible with StashAwareArtifactManager @SuppressFBWarnings(value="DM_DEFAULT_ENCODING", justification="test code") - public static Map> stashesOf(@Nonnull Run build) throws IOException { + public static Map> stashesOf(@NonNull Run build) throws IOException { Map> result = new TreeMap<>(); File[] kids = storage(build).listFiles(); if (kids != null) { @@ -248,12 +249,12 @@ public static Map> stashesOf(@Nonnull Run build) return result; } - private static @Nonnull File storage(@Nonnull Run build) throws IOException { + private static @NonNull File storage(@NonNull Run build) throws IOException { assert stashAwareArtifactManager(build) == null; return new File(build.getRootDir(), "stashes"); } - private static @Nonnull File storage(@Nonnull Run build, @Nonnull String name) throws IOException { + private static @NonNull File storage(@NonNull Run build, @NonNull String name) throws IOException { File dir = storage(build); File f = new File(dir, name + SUFFIX); if (!f.getParentFile().equals(dir)) { @@ -276,7 +277,7 @@ public static abstract class StashBehavior implements ExtensionPoint { * @param build a build which has finished * @return true (the default) to go ahead and call {@link #clearAll}, false to stop */ - public boolean shouldClearAll(@Nonnull Run build) { + public boolean shouldClearAll(@NonNull Run build) { return true; } @@ -298,13 +299,13 @@ public boolean shouldClearAll(@Nonnull Run build) { public interface StashAwareArtifactManager /* extends ArtifactManager */ { /** @see StashManager#stash(Run, String, FilePath, Launcher, EnvVars, TaskListener, String, String, boolean, boolean) */ - void stash(@Nonnull String name, @Nonnull FilePath workspace, @Nonnull Launcher launcher, @Nonnull EnvVars env, @Nonnull TaskListener listener, @CheckForNull String includes, @CheckForNull String excludes, boolean useDefaultExcludes, boolean allowEmpty) throws IOException, InterruptedException; + void stash(@NonNull String name, @NonNull FilePath workspace, @NonNull Launcher launcher, @NonNull EnvVars env, @NonNull TaskListener listener, @CheckForNull String includes, @CheckForNull String excludes, boolean useDefaultExcludes, boolean allowEmpty) throws IOException, InterruptedException; /** @see StashManager#unstash(Run, String, FilePath, Launcher, EnvVars, TaskListener) */ - void unstash(@Nonnull String name, @Nonnull FilePath workspace, @Nonnull Launcher launcher, @Nonnull EnvVars env, @Nonnull TaskListener listener) throws IOException, InterruptedException; + void unstash(@NonNull String name, @NonNull FilePath workspace, @NonNull Launcher launcher, @NonNull EnvVars env, @NonNull TaskListener listener) throws IOException, InterruptedException; /** @see StashManager#clearAll(Run, TaskListener) */ - void clearAllStashes(@Nonnull TaskListener listener) throws IOException, InterruptedException; + void clearAllStashes(@NonNull TaskListener listener) throws IOException, InterruptedException; /** * Copy all stashes and artifacts from one build to another. @@ -312,17 +313,17 @@ public interface StashAwareArtifactManager /* extends ArtifactManager */ { * If the implementation cannot handle {@code to} for whatever reason, it may throw {@link AbortException}. 
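A hedged usage sketch of the non-deprecated stash/unstash overloads shown in this file; the stash name, include pattern, and wrapper method are illustrative, and the Run, FilePath, Launcher, EnvVars, and TaskListener arguments are assumed to be supplied by the calling step:

    import hudson.EnvVars;
    import hudson.FilePath;
    import hudson.Launcher;
    import hudson.model.Run;
    import hudson.model.TaskListener;
    import java.io.IOException;
    import org.jenkinsci.plugins.workflow.flow.StashManager;

    class StashUsageSketch {
        /** Illustrative only: stashes test reports from one workspace and restores them into another. */
        static void copyReports(Run<?, ?> build, FilePath from, FilePath to, Launcher launcher,
                                EnvVars env, TaskListener listener) throws IOException, InterruptedException {
            StashManager.stash(build, "test-results", from, launcher, env, listener,
                    "**/target/surefire-reports/*.xml", null, /* useDefaultExcludes */ true, /* allowEmpty */ false);
            StashManager.unstash(build, "test-results", to, launcher, env, listener);
        }
    }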
* @see CopyStashesAndArtifacts */ - void copyAllArtifactsAndStashes(@Nonnull Run to, @Nonnull TaskListener listener) throws IOException, InterruptedException; + void copyAllArtifactsAndStashes(@NonNull Run to, @NonNull TaskListener listener) throws IOException, InterruptedException; } - private static @CheckForNull StashAwareArtifactManager stashAwareArtifactManager(@Nonnull Run build) throws IOException { + private static @CheckForNull StashAwareArtifactManager stashAwareArtifactManager(@NonNull Run build) throws IOException { ArtifactManager am = build.pickArtifactManager(); return am instanceof StashAwareArtifactManager ? (StashAwareArtifactManager) am : null; } @Deprecated - private static @Nonnull Launcher launcherFor(@Nonnull FilePath workspace, @Nonnull TaskListener listener) { + private static @NonNull Launcher launcherFor(@NonNull FilePath workspace, @NonNull TaskListener listener) { Computer c = workspace.toComputer(); if (c != null) { Node n = c.getNode(); @@ -339,7 +340,7 @@ public interface StashAwareArtifactManager /* extends ArtifactManager */ { } @Deprecated - private static @Nonnull EnvVars envFor(@Nonnull Run build, @Nonnull FilePath workspace, @Nonnull TaskListener listener) throws IOException, InterruptedException { + private static @NonNull EnvVars envFor(@NonNull Run build, @NonNull FilePath workspace, @NonNull TaskListener listener) throws IOException, InterruptedException { Computer c = workspace.toComputer(); if (c != null) { EnvVars e = c.getEnvironment(); @@ -362,16 +363,13 @@ public interface StashAwareArtifactManager /* extends ArtifactManager */ { return; } VirtualFile srcroot = original.getArtifactManager().root(); - FilePath dstDir = createTmpDir(); + FilePath dstDir = new FilePath(Files.createTempDirectory("artifact-copy").toFile()); try { Map files = new HashMap<>(); for (String path : srcroot.list("**/*", null, false)) { files.put(path, path); - InputStream in = srcroot.child(path).open(); - try { + try(InputStream in = srcroot.child(path).open()) { dstDir.child(path).copyFrom(in); - } finally { - IOUtils.closeQuietly(in); } } if (!files.isEmpty()) { @@ -385,14 +383,6 @@ public interface StashAwareArtifactManager /* extends ArtifactManager */ { StashManager.copyAll(original, copy); } - private FilePath createTmpDir() throws IOException { - File dir = File.createTempFile("artifact", "copy"); - if (!(dir.delete() && dir.mkdirs())) { - throw new IOException("Failed to create temporary directory " + dir.getPath()); - } - return new FilePath(dir); - } - } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/flow/StepListener.java b/src/main/java/org/jenkinsci/plugins/workflow/flow/StepListener.java index ac6bb7fb..e514aa9d 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/flow/StepListener.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/flow/StepListener.java @@ -29,7 +29,7 @@ import org.jenkinsci.plugins.workflow.steps.Step; import org.jenkinsci.plugins.workflow.steps.StepContext; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * {@link StepListener}s are fired before invoking a step but after the {@link FlowNode} has been created and the @@ -41,5 +41,5 @@ public interface StepListener extends ExtensionPoint { * Called before a {@link Step} is invoked, but after its {@link FlowNode} and {@link StepContext} have been created. * Listeners can make the step fail by calling {@link StepContext#onFailure}. 
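A minimal sketch of this extension point in use; the listener class and the "forbidden" function name are hypothetical, and the failure path uses StepContext#onFailure as described above:

    import edu.umd.cs.findbugs.annotations.NonNull;
    import hudson.Extension;
    import org.jenkinsci.plugins.workflow.flow.StepListener;
    import org.jenkinsci.plugins.workflow.steps.Step;
    import org.jenkinsci.plugins.workflow.steps.StepContext;
    import org.jenkinsci.plugins.workflow.steps.StepDescriptor;

    /** Illustrative only: fails a hypothetical "forbidden" step before it starts. */
    @Extension
    public class VetoingStepListener implements StepListener {
        @Override
        public void notifyOfNewStep(@NonNull Step step, @NonNull StepContext context) {
            StepDescriptor descriptor = (StepDescriptor) step.getDescriptor();
            if ("forbidden".equals(descriptor.getFunctionName())) {
                context.onFailure(new IllegalStateException("The 'forbidden' step may not be used here"));
            }
        }
    }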
*/ - void notifyOfNewStep(@Nonnull Step step, @Nonnull StepContext context); + void notifyOfNewStep(@NonNull Step step, @NonNull StepContext context); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockEndNode.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockEndNode.java index 2c0d2494..bcfe4bea 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockEndNode.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockEndNode.java @@ -28,7 +28,7 @@ import org.jenkinsci.plugins.workflow.flow.FlowExecution; import java.util.List; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * End of a block. @@ -55,7 +55,7 @@ public BlockEndNode(FlowExecution exec, String id, START start, List p * @return an earlier node matching this block * @throws IllegalStateException if the start node could not be reloaded after deserialization */ - public @Nonnull START getStartNode() { + public @NonNull START getStartNode() { if (start == null) { try { start = (START) getExecution().getNode(startId); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockStartNode.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockStartNode.java index 69553974..b40135ae 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockStartNode.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/BlockStartNode.java @@ -26,7 +26,7 @@ import org.jenkinsci.plugins.workflow.flow.FlowExecution; -import javax.annotation.CheckForNull; +import edu.umd.cs.findbugs.annotations.CheckForNull; import java.util.List; /** diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowNode.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowNode.java index 271ef53f..c5655a67 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowNode.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowNode.java @@ -43,8 +43,8 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import org.jenkinsci.plugins.workflow.actions.ErrorAction; import org.jenkinsci.plugins.workflow.actions.LabelAction; @@ -143,14 +143,14 @@ public final boolean isActive() { return getPersistentAction(ErrorAction.class); } - public @Nonnull FlowExecution getExecution() { + public @NonNull FlowExecution getExecution() { return exec; } /** * Returns a read-only view of parents. */ - @Nonnull + @NonNull public List getParents() { if (parents == null) { parents = loadParents(parentIds); @@ -158,7 +158,7 @@ public List getParents() { return parents; } - @Nonnull + @NonNull private List loadParents(List parentIds) { try { if (parentIds.size() == 1) { @@ -190,14 +190,14 @@ public String getEnclosingId() { * Get the list of enclosing {@link BlockStartNode}s, starting from innermost, for this node. * May be empty if we are the {@link FlowStartNode} or {@link FlowEndNode} */ - @Nonnull + @NonNull public List getEnclosingBlocks() { return this.exec.findAllEnclosingBlockStarts(this); } /** Return an iterator over all enclosing blocks, from the nearest-enclosing outward ("inside-out" order). * Prefer this to {@link #getEnclosingBlocks()} unless you need ALL nodes, because it can evaluate lazily. 
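A small sketch of the lazy iteration described above; the helper is illustrative and stops at the first enclosing block carrying a LabelAction instead of materializing the full list:

    import edu.umd.cs.findbugs.annotations.CheckForNull;
    import org.jenkinsci.plugins.workflow.actions.LabelAction;
    import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
    import org.jenkinsci.plugins.workflow.graph.FlowNode;

    class EnclosingBlockSketch {
        /** Illustrative only: label of the nearest enclosing labelled block, or null if there is none. */
        @CheckForNull
        static String nearestLabel(FlowNode node) {
            for (BlockStartNode start : node.iterateEnclosingBlocks()) {
                LabelAction label = start.getPersistentAction(LabelAction.class);
                if (label != null) {
                    return label.getDisplayName();
                }
            }
            return null;
        }
    }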
*/ - @Nonnull + @NonNull public Iterable iterateEnclosingBlocks() { return this.exec.iterateEnclosingBlocks(this); } @@ -205,7 +205,7 @@ public Iterable iterateEnclosingBlocks() { /** * Returns a read-only view of the IDs for enclosing blocks of this flow node, innermost first. May be empty. */ - @Nonnull + @NonNull public List getAllEnclosingIds() { List nodes = getEnclosingBlocks(); ArrayList output = new ArrayList<>(nodes.size()); @@ -217,7 +217,7 @@ public List getAllEnclosingIds() { @Restricted(DoNotUse.class) @Exported(name="parents") - @Nonnull + @NonNull public List getParentIds() { if (parentIds != null) { return Collections.unmodifiableList(parentIds); @@ -297,7 +297,7 @@ public BallColor getIconColor() { return c; } - private static BallColor resultToBallColor(@Nonnull Result result) { + private static BallColor resultToBallColor(@NonNull Result result) { if (result == Result.SUCCESS) { return BallColor.BLUE; } else if (result == Result.UNSTABLE) { @@ -370,7 +370,7 @@ protected synchronized void setActions(List actions) { * @return First nontransient action or null if not found. */ @CheckForNull - public final T getPersistentAction(@Nonnull Class type) { + public final T getPersistentAction(@NonNull Class type) { loadActions(); for (Action a : actions) { if (type.isInstance(a)) { @@ -416,6 +416,7 @@ private synchronized void loadActions() { } @Exported + @NonNull @SuppressWarnings("deprecation") // of override @Override @SuppressFBWarnings(value = "UG_SYNC_SET_UNSYNC_GET", justification = "CopyOnWrite ArrayList, and field load & modification is synchronized") @@ -466,7 +467,7 @@ public int size() { } @Override - public boolean removeAll(Collection c) { + public boolean removeAll(@NonNull Collection c) { boolean changed = actions.removeAll(c); if (changed) { persistSafe(); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowStartNode.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowStartNode.java index 96d3f916..f93c1249 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowStartNode.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/FlowStartNode.java @@ -24,6 +24,7 @@ package org.jenkinsci.plugins.workflow.graph; +import edu.umd.cs.findbugs.annotations.NonNull; import org.jenkinsci.plugins.workflow.flow.FlowExecution; import java.util.List; @@ -44,6 +45,7 @@ public FlowStartNode(FlowExecution exec, String id) { * @deprecated * Why are you calling a method that always return empty list? 
*/ + @NonNull @Override public List getParents() { return super.getParents(); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/GraphLookupView.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/GraphLookupView.java index 9573a94c..2b77ff7e 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/GraphLookupView.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/GraphLookupView.java @@ -1,7 +1,7 @@ package org.jenkinsci.plugins.workflow.graph; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -21,13 +21,13 @@ */ public interface GraphLookupView { /** Tests if the node is a currently running head, or the start of a block that has not completed executing */ - boolean isActive(@Nonnull FlowNode node); + boolean isActive(@NonNull FlowNode node); /** Find the end node corresponding to a start node, and can be used to tell if the block is completed. * @return {@link BlockEndNode} matching the given start node, or null if block hasn't completed */ @CheckForNull - BlockEndNode getEndNode(@Nonnull BlockStartNode startNode); + BlockEndNode getEndNode(@NonNull BlockStartNode startNode); /** * Find the immediately enclosing {@link BlockStartNode} around a {@link FlowNode} @@ -35,7 +35,7 @@ public interface GraphLookupView { * @return Null if node is a {@link FlowStartNode} or {@link FlowEndNode} */ @CheckForNull - BlockStartNode findEnclosingBlockStart(@Nonnull FlowNode node); + BlockStartNode findEnclosingBlockStart(@NonNull FlowNode node); /** * Provide an {@link Iterable} over all enclosing blocks, which can be used similarly to {@link #findAllEnclosingBlockStarts(FlowNode)} but @@ -45,15 +45,15 @@ public interface GraphLookupView { * @param node Node to find enclosing blocks for * @return Iterable over enclosing blocks, from the nearest-enclosing outward ("inside-out" order) */ - Iterable iterateEnclosingBlocks(@Nonnull FlowNode node); + Iterable iterateEnclosingBlocks(@NonNull FlowNode node); /** Return all enclosing block start nodes, as with {@link #findEnclosingBlockStart(FlowNode)}. *

Usage note:Prefer using {@link #iterateEnclosingBlocks(FlowNode)} unless you know you need ALL blocks, since that can lazy-load. * @param node Node to find enclosing blocks for * @return All enclosing block starts from the nearest-enclosing outward ("inside-out" order), or EMPTY_LIST if this is a start or end node */ - @Nonnull - List findAllEnclosingBlockStarts(@Nonnull FlowNode node); + @NonNull + List findAllEnclosingBlockStarts(@NonNull FlowNode node); /** Provides a trivial implementation to facilitate implementing {@link #iterateEnclosingBlocks(FlowNode)}*/ class EnclosingBlocksIterable implements Iterable { @@ -90,7 +90,7 @@ public void remove() { } } - public EnclosingBlocksIterable(@Nonnull GraphLookupView view, @Nonnull FlowNode node) { + public EnclosingBlocksIterable(@NonNull GraphLookupView view, @NonNull FlowNode node) { this.view = view; this.node = node; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/StandardGraphLookupView.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/StandardGraphLookupView.java index 5d6e2ffc..e721410a 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/StandardGraphLookupView.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/StandardGraphLookupView.java @@ -6,17 +6,17 @@ import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; +import java.util.concurrent.ConcurrentHashMap; /** * Provides overall insight into the structure of a flow graph... but with limited visibility so we can change implementation. - * Designed to work entirely on the basis of the {@link FlowNode#id} rather than the {@link FlowNode}s themselves. + * Designed to work entirely on the basis of the {@link FlowNode#getId()} rather than the {@link FlowNode}s themselves. 
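A hedged sketch against this lookup API; per the getEndNode contract above, a block is complete exactly when a matching end node exists (the helper name is illustrative):

    import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
    import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
    import org.jenkinsci.plugins.workflow.graph.GraphLookupView;

    class BlockStatusSketch {
        /** Illustrative only: true once the block opened by {@code start} has finished executing. */
        static boolean isBlockComplete(GraphLookupView view, BlockStartNode start) {
            BlockEndNode<?> end = view.getEndNode(start);
            return end != null;
        }
    }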
*/ @SuppressFBWarnings(value = "ES_COMPARING_STRINGS_WITH_EQ", justification = "Can can use instance identity when comparing to a final constant") @Restricted(NoExternalUse.class) @@ -25,10 +25,10 @@ public final class StandardGraphLookupView implements GraphLookupView, GraphList static final String INCOMPLETE = ""; /** Map the blockStartNode to its endNode, to accellerate a range of operations */ - HashMap blockStartToEnd = new HashMap<>(); + ConcurrentHashMap blockStartToEnd = new ConcurrentHashMap<>(); /** Map a node to its nearest enclosing block */ - HashMap nearestEnclosingBlock = new HashMap<>(); + ConcurrentHashMap nearestEnclosingBlock = new ConcurrentHashMap<>(); public void clearCache() { blockStartToEnd.clear(); @@ -37,12 +37,13 @@ public void clearCache() { /** Update with a new node added to the flowgraph */ @Override - public void onNewHead(@Nonnull FlowNode newHead) { + public void onNewHead(@NonNull FlowNode newHead) { if (newHead instanceof BlockEndNode) { - blockStartToEnd.put(((BlockEndNode)newHead).getStartNode().getId(), newHead.getId()); - String overallEnclosing = nearestEnclosingBlock.get(((BlockEndNode) newHead).getStartNode().getId()); - if (overallEnclosing != null) { - nearestEnclosingBlock.put(newHead.getId(), overallEnclosing); + String startNodeId = ((BlockEndNode)newHead).getStartNode().getId(); + blockStartToEnd.put(startNodeId, newHead.getId()); + String enclosingId = nearestEnclosingBlock.get(startNodeId); + if (enclosingId != null) { + nearestEnclosingBlock.put(newHead.getId(), enclosingId); } } else { if (newHead instanceof BlockStartNode) { @@ -76,7 +77,7 @@ public StandardGraphLookupView() { } @Override - public boolean isActive(@Nonnull FlowNode node) { + public boolean isActive(@NonNull FlowNode node) { if (node instanceof FlowEndNode) { // cf. 
JENKINS-26139 return !node.getExecution().isComplete(); } else if (node instanceof BlockStartNode){ // BlockStartNode @@ -87,7 +88,7 @@ public boolean isActive(@Nonnull FlowNode node) { } // Do a brute-force scan for the block end matching the start, caching info along the way for future use - BlockEndNode bruteForceScanForEnd(@Nonnull BlockStartNode start) { + BlockEndNode bruteForceScanForEnd(@NonNull BlockStartNode start) { DepthFirstScanner scan = new DepthFirstScanner(); scan.setup(start.getExecution().getCurrentHeads()); for (FlowNode f : scan) { @@ -116,7 +117,7 @@ BlockEndNode bruteForceScanForEnd(@Nonnull BlockStartNode start) { /** Do a brute-force scan for the enclosing blocks **/ - BlockStartNode bruteForceScanForEnclosingBlock(@Nonnull final FlowNode node) { + BlockStartNode bruteForceScanForEnclosingBlock(@NonNull final FlowNode node) { FlowNode current = node; while (!(current instanceof FlowStartNode)) { // Hunt back for enclosing blocks, a potentially expensive operation @@ -157,7 +158,7 @@ BlockStartNode bruteForceScanForEnclosingBlock(@Nonnull final FlowNode node) { @CheckForNull @Override - public BlockEndNode getEndNode(@Nonnull final BlockStartNode startNode) { + public BlockEndNode getEndNode(@NonNull final BlockStartNode startNode) { String id = blockStartToEnd.get(startNode.getId()); if (id != null) { @@ -167,17 +168,15 @@ public BlockEndNode getEndNode(@Nonnull final BlockStartNode startNode) { throw new RuntimeException(ioe); } } else { - BlockEndNode node = bruteForceScanForEnd(startNode); - if (node != null) { - blockStartToEnd.put(startNode.getId(), node.getId()); - } - return node; + // returns the end node or null + // if this returns end node, it also adds start and end to blockStartToEnd + return bruteForceScanForEnd(startNode); } } @CheckForNull @Override - public BlockStartNode findEnclosingBlockStart(@Nonnull FlowNode node) { + public BlockStartNode findEnclosingBlockStart(@NonNull FlowNode node) { if (node instanceof FlowStartNode || node instanceof FlowEndNode) { return null; } @@ -191,22 +190,18 @@ public BlockStartNode findEnclosingBlockStart(@Nonnull FlowNode node) { } } - BlockStartNode enclosing = bruteForceScanForEnclosingBlock(node); - if (enclosing != null) { - nearestEnclosingBlock.put(node.getId(), enclosing.getId()); - return enclosing; - } - return null; + // when scan completes, enclosing is in the cache if it exists + return bruteForceScanForEnclosingBlock(node); } @Override - public Iterable iterateEnclosingBlocks(@Nonnull FlowNode node) { + public Iterable iterateEnclosingBlocks(@NonNull FlowNode node) { return new EnclosingBlocksIterable(this, node); } - @Nonnull + @NonNull @Override - public List findAllEnclosingBlockStarts(@Nonnull FlowNode node) { + public List findAllEnclosingBlockStarts(@NonNull FlowNode node) { if (node instanceof FlowStartNode || node instanceof FlowEndNode) { return Collections.emptyList(); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graph/StepNode.java b/src/main/java/org/jenkinsci/plugins/workflow/graph/StepNode.java index 9c24b67b..81db7d4d 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graph/StepNode.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graph/StepNode.java @@ -26,7 +26,7 @@ import org.jenkinsci.plugins.workflow.steps.Step; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; -import javax.annotation.CheckForNull; +import edu.umd.cs.findbugs.annotations.CheckForNull; /** * Optional interface for a {@link FlowNode} that has an associated {@link 
StepDescriptor}. diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/AbstractFlowScanner.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/AbstractFlowScanner.java index 088b5bb5..c0b3e328 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/AbstractFlowScanner.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/AbstractFlowScanner.java @@ -28,9 +28,9 @@ import org.jenkinsci.plugins.workflow.flow.FlowExecution; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; -import javax.annotation.concurrent.NotThreadSafe; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -57,8 +57,8 @@ * * *

All APIs visit the parent nodes, walking backward from heads(inclusive) until they hit {@link #myBlackList} nodes (exclusive) or reach the end of the DAG. - * If blackList nodes are an empty collection or null, APIs will walk to the beginning of the FlowGraph. - * Multiple blackList nodes are helpful for putting separate bounds on walking different parallel branches. + * If denyList nodes are an empty collection or null, APIs will walk to the beginning of the FlowGraph. + * Multiple denyList nodes are helpful for putting separate bounds on walking different parallel branches. + *

Key Points: *

  • There are many helper methods offering syntactic sugar for the above APIs in common use cases (simpler method signatures).
  • @@ -83,7 +83,7 @@ *
  • Scan through all nodes *just* within a block *
      *
    • Use the {@link org.jenkinsci.plugins.workflow.graph.BlockEndNode} as the head
    • - *
    • Use the {@link org.jenkinsci.plugins.workflow.graph.BlockStartNode} as its blacklist with {@link Collections#singleton(Object)}
    • + *
    • Use the {@link org.jenkinsci.plugins.workflow.graph.BlockStartNode} as its denylist with {@link Collections#singleton(Object)}
    • *
  • *
* @@ -99,11 +99,11 @@ public abstract class AbstractFlowScanner implements Iterable , Filter protected Collection myBlackList = Collections.emptySet(); - /** When checking for blacklist membership, we convert to a hashset when checking more than this many elements */ + /** When checking for denylist membership, we convert to a hashset when checking more than this many elements */ protected static final int MAX_LIST_CHECK_SIZE = 5; /** Helper: convert stop nodes to a collection that can efficiently be checked for membership, handling null if needed */ - @Nonnull + @NonNull protected Collection convertToFastCheckable(@CheckForNull Collection nodeCollection) { if (nodeCollection == null || nodeCollection.size()==0) { return Collections.emptySet(); @@ -142,7 +142,7 @@ public boolean setup(@CheckForNull Collection heads, @CheckForNull Col } /** - * Helper: version of {@link #setup(Collection, Collection)} where we don't have any nodes to blacklist + * Helper: version of {@link #setup(Collection, Collection)} where we don't have any nodes to denylist */ public boolean setup(@CheckForNull Collection heads) { if (heads == null) { @@ -152,7 +152,7 @@ public boolean setup(@CheckForNull Collection heads) { } /** - * Helper: version of {@link #setup(Collection, Collection)} where we don't have any nodes to blacklist, and have just a single head + * Helper: version of {@link #setup(Collection, Collection)} where we don't have any nodes to denylist, and have just a single head */ public boolean setup(@CheckForNull FlowNode head, @CheckForNull Collection blackList) { if (head == null) { @@ -162,7 +162,7 @@ public boolean setup(@CheckForNull FlowNode head, @CheckForNull Collection filteredHeads); + protected abstract void setHeads(@NonNull Collection filteredHeads); /** * Actual meat of the iteration, get the next node to visit, using and updating state as needed @@ -194,7 +194,7 @@ public boolean setup(@CheckForNull FlowNode head) { * @return Next node to visit, or null if we've exhausted the node list */ @CheckForNull - protected abstract FlowNode next(@Nonnull FlowNode current, @Nonnull Collection blackList); + protected abstract FlowNode next(@NonNull FlowNode current, @NonNull Collection blackList); @Override public boolean hasNext() { @@ -218,7 +218,7 @@ public final void remove() { } @Override - @Nonnull + @NonNull public Iterator iterator() { return this; } @@ -229,8 +229,8 @@ public Iterator iterator() { * @return A {@link Filterator} against this FlowScanner, which can be filtered in additional ways. 
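A sketch of the block-bounded recipe from the key points above; the helper name is illustrative, and the block's end node serves as the head while its start node acts as the denylist so the walk stays inside the block:

    import java.util.Collections;
    import java.util.List;
    import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
    import org.jenkinsci.plugins.workflow.graph.FlowNode;
    import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;

    class BlockScanSketch {
        /** Illustrative only: nodes from the block's end node back to, but not including, its start node. */
        static List<FlowNode> nodesInsideBlock(BlockEndNode<?> end) {
            DepthFirstScanner scanner = new DepthFirstScanner();
            return scanner.filteredNodes(
                    Collections.<FlowNode>singleton(end),                // head: the block's end node (inclusive)
                    Collections.<FlowNode>singleton(end.getStartNode()), // denylist: its start node (exclusive)
                    node -> true);
        }
    }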
*/ @Override - @Nonnull - public Filterator filter(@Nonnull Predicate filterCondition) { + @NonNull + public Filterator filter(@NonNull Predicate filterCondition) { return new FilteratorImpl<>(this, filterCondition); } @@ -261,21 +261,21 @@ public FlowNode findFirstMatch(@CheckForNull Collection heads, // Polymorphic methods for syntactic sugar - /** Syntactic sugar for {@link #findFirstMatch(Collection, Collection, Predicate)} where there is no blackList */ + /** Syntactic sugar for {@link #findFirstMatch(Collection, Collection, Predicate)} where there is no denyList */ @CheckForNull - public FlowNode findFirstMatch(@CheckForNull Collection heads, @Nonnull Predicate matchPredicate) { + public FlowNode findFirstMatch(@CheckForNull Collection heads, @NonNull Predicate matchPredicate) { return this.findFirstMatch(heads, null, matchPredicate); } - /** Syntactic sugar for {@link #findFirstMatch(Collection, Collection, Predicate)} where there is a single head and no blackList */ + /** Syntactic sugar for {@link #findFirstMatch(Collection, Collection, Predicate)} where there is a single head and no denyList */ @CheckForNull - public FlowNode findFirstMatch(@CheckForNull FlowNode head, @Nonnull Predicate matchPredicate) { + public FlowNode findFirstMatch(@CheckForNull FlowNode head, @NonNull Predicate matchPredicate) { return this.findFirstMatch(Collections.singleton(head), null, matchPredicate); } - /** Syntactic sugar for {@link #findFirstMatch(Collection, Collection, Predicate)} using {@link FlowExecution#getCurrentHeads()} to get heads and no blackList */ + /** Syntactic sugar for {@link #findFirstMatch(Collection, Collection, Predicate)} using {@link FlowExecution#getCurrentHeads()} to get heads and no denyList */ @CheckForNull - public FlowNode findFirstMatch(@CheckForNull FlowExecution exec, @Nonnull Predicate matchPredicate) { + public FlowNode findFirstMatch(@CheckForNull FlowExecution exec, @NonNull Predicate matchPredicate) { if (exec != null && exec.getCurrentHeads() != null && !exec.getCurrentHeads().isEmpty()) { return this.findFirstMatch(exec.getCurrentHeads(), null, matchPredicate); } @@ -290,7 +290,7 @@ public FlowNode findFirstMatch(@CheckForNull FlowExecution exec, @Nonnull Predic * @param matchCondition Predicate that must be met for nodes to be included in output. Input is always non-null. * @return List of flownodes matching the predicate. */ - @Nonnull + @NonNull public List filteredNodes(@CheckForNull Collection heads, @CheckForNull Collection blackList, Predicate matchCondition) { @@ -308,7 +308,7 @@ public List filteredNodes(@CheckForNull Collection heads, } /** Convenience method to get the list all flownodes in the iterator order. */ - @Nonnull + @NonNull public List allNodes(@CheckForNull Collection heads) { if (!setup(heads)) { return Collections.emptyList(); @@ -321,25 +321,25 @@ public List allNodes(@CheckForNull Collection heads) { } /** Convenience method to get the list of all {@link FlowNode}s for the execution, in iterator order. */ - @Nonnull + @NonNull public List allNodes(@CheckForNull FlowExecution exec) { return (exec == null) ? 
Collections.emptyList() : allNodes(exec.getCurrentHeads()); } - /** Syntactic sugar for {@link #filteredNodes(Collection, Collection, Predicate)} with no blackList nodes */ - @Nonnull - public List filteredNodes(@CheckForNull Collection heads, @Nonnull Predicate matchPredicate) { + /** Syntactic sugar for {@link #filteredNodes(Collection, Collection, Predicate)} with no denyList nodes */ + @NonNull + public List filteredNodes(@CheckForNull Collection heads, @NonNull Predicate matchPredicate) { return this.filteredNodes(heads, null, matchPredicate); } - /** Syntactic sugar for {@link #filteredNodes(Collection, Collection, Predicate)} with a single head and no blackList nodes */ - @Nonnull - public List filteredNodes(@CheckForNull FlowNode head, @Nonnull Predicate matchPredicate) { + /** Syntactic sugar for {@link #filteredNodes(Collection, Collection, Predicate)} with a single head and no denyList nodes */ + @NonNull + public List filteredNodes(@CheckForNull FlowNode head, @NonNull Predicate matchPredicate) { return this.filteredNodes(Collections.singleton(head), null, matchPredicate); } - @Nonnull - public List filteredNodes(@CheckForNull FlowExecution exec, @Nonnull Predicate matchPredicate) { + @NonNull + public List filteredNodes(@CheckForNull FlowExecution exec, @NonNull Predicate matchPredicate) { if (exec == null) { return Collections.emptyList(); } @@ -356,7 +356,7 @@ public List filteredNodes(@CheckForNull FlowExecution exec, @Nonnull P * @param blackList Nodes we can't visit or pass beyond. * @param visitor Visitor that will see each FlowNode encountered. */ - public void visitAll(@CheckForNull Collection heads, @CheckForNull Collection blackList, @Nonnull FlowNodeVisitor visitor) { + public void visitAll(@CheckForNull Collection heads, @CheckForNull Collection blackList, @NonNull FlowNodeVisitor visitor) { if (!setup(heads, blackList)) { return; } @@ -368,8 +368,8 @@ public void visitAll(@CheckForNull Collection heads, @CheckForNull Col } } - /** Syntactic sugar for {@link #visitAll(Collection, Collection, FlowNodeVisitor)} where we don't blacklist any nodes */ - public void visitAll(@CheckForNull Collection heads, @Nonnull FlowNodeVisitor visitor) { + /** Syntactic sugar for {@link #visitAll(Collection, Collection, FlowNodeVisitor)} where we don't denylist any nodes */ + public void visitAll(@CheckForNull Collection heads, @NonNull FlowNodeVisitor visitor) { visitAll(heads, null, visitor); } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/BlockChunkFinder.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/BlockChunkFinder.java index f3b4f168..e2ad8023 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/BlockChunkFinder.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/BlockChunkFinder.java @@ -4,8 +4,8 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Matches start and end of a block. Any block! 
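BlockChunkFinder treats every block boundary as a chunk boundary. As a contrasting hedged sketch, loosely modeled on label-based finders in downstream analysis plugins (the class name is hypothetical), chunks can instead be bounded by labelled nodes:

    import edu.umd.cs.findbugs.annotations.CheckForNull;
    import edu.umd.cs.findbugs.annotations.NonNull;
    import org.jenkinsci.plugins.workflow.actions.LabelAction;
    import org.jenkinsci.plugins.workflow.graph.FlowNode;
    import org.jenkinsci.plugins.workflow.graphanalysis.ChunkFinder;

    /** Illustrative only: a chunk starts at each labelled node and ends just before the next labelled node. */
    public class LabelChunkFinder implements ChunkFinder {
        @Override
        public boolean isStartInsideChunk() {
            return true; // the labelled node itself belongs to the chunk it opens
        }

        @Override
        public boolean isChunkStart(@NonNull FlowNode current, @CheckForNull FlowNode previous) {
            return current.getPersistentAction(LabelAction.class) != null;
        }

        @Override
        public boolean isChunkEnd(@NonNull FlowNode current, @CheckForNull FlowNode previous) {
            // The scan runs backwards, so a chunk ends when the node just visited was a chunk start.
            return previous != null && isChunkStart(previous, null);
        }
    }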
@@ -21,12 +21,12 @@ public boolean isStartInsideChunk() { } @Override - public boolean isChunkStart(@Nonnull FlowNode current, @CheckForNull FlowNode previous) { + public boolean isChunkStart(@NonNull FlowNode current, @CheckForNull FlowNode previous) { return current instanceof BlockStartNode; } @Override - public boolean isChunkEnd(@Nonnull FlowNode current, @CheckForNull FlowNode previous) { + public boolean isChunkEnd(@NonNull FlowNode current, @CheckForNull FlowNode previous) { return current instanceof BlockEndNode; } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ChunkFinder.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ChunkFinder.java index 7a696cbb..37012494 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ChunkFinder.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ChunkFinder.java @@ -2,17 +2,17 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Think of this as setting conditions to mark a region of interest in the graph of {@link FlowNode} from a {@link org.jenkinsci.plugins.workflow.flow.FlowExecution}. *

This is used to define a linear "chunk" from the graph of FlowNodes returned by a {@link ForkScanner}, after it applies ordering. *

This is done by invoking {@link ForkScanner#visitSimpleChunks(SimpleChunkVisitor, ChunkFinder)}. *

Your {@link SimpleChunkVisitor} will receive callbacks about chunk boundaries on the basis of the ChunkFinder. - * It is responsible for tracking the state based on events fired + * It is responsible for tracking the state based on events fired. * - *

Common uses: + *

Common uses: *

    *
  • Find all {@link FlowNode}s within a specific block type, such as the block created by a timeout block, 'node' (executor) block, etc
  • *
  • Find all {@link FlowNode}s between specific markers, such as labels, milestones, or steps generating an error
  • @@ -42,7 +42,7 @@ public interface ChunkFinder { * @param previous Previous node, to use in testing chunk * @return True if current node is the beginning of chunk */ - boolean isChunkStart(@Nonnull FlowNode current, @CheckForNull FlowNode previous); + boolean isChunkStart(@NonNull FlowNode current, @CheckForNull FlowNode previous); /** * Test if the current node is the end of a chunk (inclusive) @@ -52,5 +52,5 @@ public interface ChunkFinder { * @param previous Previous node, to use in testing chunk * @return True if current is the end of a chunk (inclusive) */ - boolean isChunkEnd(@Nonnull FlowNode current, @CheckForNull FlowNode previous); + boolean isChunkEnd(@NonNull FlowNode current, @CheckForNull FlowNode previous); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/DepthFirstScanner.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/DepthFirstScanner.java index 5677e2d0..ca85a8bf 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/DepthFirstScanner.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/DepthFirstScanner.java @@ -27,8 +27,8 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.Nonnull; -import javax.annotation.concurrent.NotThreadSafe; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; @@ -65,7 +65,7 @@ protected void reset() { } @Override - protected void setHeads(@Nonnull Collection heads) { + protected void setHeads(@NonNull Collection heads) { if (heads.isEmpty()) { return; } @@ -87,7 +87,7 @@ protected boolean testCandidate(FlowNode f, Collection blackList) { } @Override - protected FlowNode next(@Nonnull FlowNode current, @Nonnull final Collection blackList) { + protected FlowNode next(@NonNull FlowNode current, @NonNull final Collection blackList) { FlowNode output = null; // Walk through parents of current node diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/Filterator.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/Filterator.java index b77e40ae..a003e7b6 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/Filterator.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/Filterator.java @@ -26,7 +26,7 @@ import com.google.common.base.Predicate; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Iterator; /** Iterator that may be navigated through a filtered wrapper. 
@@ -38,6 +38,6 @@ */ public interface Filterator extends Iterator { /** Returns a filtered view of the iterator, which calls the iterator until matches are found */ - @Nonnull - Filterator filter(@Nonnull Predicate matchCondition); + @NonNull + Filterator filter(@NonNull Predicate matchCondition); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FilteratorImpl.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FilteratorImpl.java index b49ebf26..29f3e2b2 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FilteratorImpl.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FilteratorImpl.java @@ -26,8 +26,8 @@ import com.google.common.base.Predicate; -import javax.annotation.Nonnull; -import javax.annotation.concurrent.NotThreadSafe; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; import java.util.Iterator; /** Filters an iterator against a match predicate by wrapping an iterator @@ -40,12 +40,13 @@ class FilteratorImpl implements Filterator { private Iterator wrapped = null; private Predicate matchCondition = null; + @NonNull @Override - public FilteratorImpl filter(Predicate matchCondition) { + public FilteratorImpl filter(@NonNull Predicate matchCondition) { return new FilteratorImpl<>(this, matchCondition); } - public FilteratorImpl(@Nonnull Iterator it, @Nonnull Predicate matchCondition) { + public FilteratorImpl(@NonNull Iterator it, @NonNull Predicate matchCondition) { this.wrapped = it; this.matchCondition = matchCondition; diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunk.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunk.java index 6bce6d0c..254deda6 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunk.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunk.java @@ -26,13 +26,13 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; /** - * Common container interface for a series of {@link FlowNode}s with a logical start and end. - *

 * Common container interface for a series of {@link FlowNode}s with a logical start and end.
 * We use this because every plugin has a different way of storing info about the nodes.
 *
 * Common uses:
 *   • A single FlowNode (when coupling with timing/status APIs)
 *   • A block (with a {@link org.jenkinsci.plugins.workflow.graph.BlockStartNode} and {@link org.jenkinsci.plugins.workflow.graph.BlockEndNode})
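As a rough illustration of the first use listed above, a chunk's two boundary nodes are enough to derive timing data. The helper below is only a sketch (the class name is made up for this example); it assumes TimingAction from this plugin's actions package, whose getStartTime returns 0 when a node carries no timing information.

import org.jenkinsci.plugins.workflow.actions.TimingAction;
import org.jenkinsci.plugins.workflow.graphanalysis.FlowChunk;

/** Hypothetical helper: approximate elapsed time of a chunk from its boundary nodes. */
class ChunkTiming {
    static long elapsedMillis(FlowChunk chunk) {
        long start = TimingAction.getStartTime(chunk.getFirstNode());
        long end = TimingAction.getStartTime(chunk.getLastNode());
        // Meaningful only when both nodes carry timing info; otherwise this is 0 or negative.
        return end - start;
    }
}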
    • @@ -45,9 +45,9 @@ * @author Sam Van Oort */ public interface FlowChunk { - @Nonnull + @NonNull FlowNode getFirstNode(); - @Nonnull + @NonNull FlowNode getLastNode(); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunkWithContext.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunkWithContext.java index 1285808b..f1ac1321 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunkWithContext.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowChunkWithContext.java @@ -2,7 +2,7 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; +import edu.umd.cs.findbugs.annotations.CheckForNull; /** FlowChunk with information about what comes before/after */ public interface FlowChunkWithContext extends FlowChunk { diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowNodeVisitor.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowNodeVisitor.java index 790963e0..2362edfd 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowNodeVisitor.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowNodeVisitor.java @@ -26,7 +26,7 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collection; /** @@ -42,5 +42,5 @@ public interface FlowNodeVisitor { * @param f Node to visit * @return False if we should stop visiting nodes */ - boolean visit(@Nonnull FlowNode f); + boolean visit(@NonNull FlowNode f); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScanningUtils.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScanningUtils.java index eb89b256..b9bc773c 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScanningUtils.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScanningUtils.java @@ -31,8 +31,8 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Comparator; import java.util.Iterator; @@ -50,14 +50,9 @@ private FlowScanningUtils() {} * @param actionClass Action class to look for * @return Predicate that will match when FlowNode has the action given */ - @Nonnull - public static Predicate hasActionPredicate(@Nonnull final Class actionClass) { - return new Predicate() { - @Override - public boolean apply(FlowNode input) { - return (input != null && input.getAction(actionClass) != null); - } - }; + @NonNull + public static Predicate hasActionPredicate(@NonNull final Class actionClass) { + return input -> (input != null && input.getAction(actionClass) != null); } // Default predicates, which may be used for common conditions @@ -119,9 +114,9 @@ public int compare(@CheckForNull FlowNode first, @CheckForNull FlowNode second) * @param f {@link FlowNode} to start from. * @return Iterator that returns all enclosing BlockStartNodes from the inside out. 
*/ - @Nonnull + @NonNull @Deprecated - public static Filterator fetchEnclosingBlocks(@Nonnull FlowNode f) { + public static Filterator fetchEnclosingBlocks(@NonNull FlowNode f) { return new FilteratorImpl<>((Iterator) f.iterateEnclosingBlocks().iterator(), Predicates.alwaysTrue()); } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScanner.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScanner.java index b161deb1..82d5da13 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScanner.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScanner.java @@ -34,10 +34,10 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.annotation.concurrent.NotThreadSafe; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; +import net.jcip.annotations.NotThreadSafe; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; @@ -122,11 +122,11 @@ public ForkScanner() { } - public ForkScanner(@Nonnull Collection heads) { + public ForkScanner(@NonNull Collection heads) { this.setup(heads); } - public ForkScanner(@Nonnull Collection heads, @Nonnull Collection blackList) { + public ForkScanner(@NonNull Collection heads, @NonNull Collection blackList) { this.setup(heads, blackList); } @@ -158,7 +158,7 @@ public boolean apply(@Nullable FlowNode input) { * Specifically, requiring classes from workflow-cps to detect if something is a parallel step. */ @Deprecated - public static void setParallelStartPredicate(@Nonnull Predicate pred) { + public static void setParallelStartPredicate(@NonNull Predicate pred) { } // Needed because the *next* node might be a parallel start if we start in middle and we don't know it @@ -217,7 +217,7 @@ static class ParallelBlockStart { BlockStartNode forkStart; // This is the node with child branches ArrayDeque unvisited = new ArrayDeque<>(); // Remaining branches of this that we have have not visited yet - ParallelBlockStart(@Nonnull BlockStartNode forkStart) { + ParallelBlockStart(@NonNull BlockStartNode forkStart) { this.forkStart = forkStart; } @@ -251,7 +251,7 @@ public boolean isLeaf() { * @throws IllegalStateException When you try to split a segment on a node that it doesn't contain, or invalid graph structure * @return Recreated fork */ - Fork split(@Nonnull HashMap nodeMapping, @Nonnull BlockStartNode joinPoint, @Nonnull FlowPiece joiningBranch) { + Fork split(@NonNull HashMap nodeMapping, @NonNull BlockStartNode joinPoint, @NonNull FlowPiece joiningBranch) { int index = visited.lastIndexOf(joinPoint); // Fork will be closer to end, so this is better than indexOf Fork newFork = new Fork(joinPoint); @@ -360,7 +360,7 @@ ArrayDeque convertForksToBlockStarts(ArrayDeque parall *
 *   • Heads are all separate branches
    */ - ArrayDeque leastCommonAncestor(@Nonnull final Set heads) { + ArrayDeque leastCommonAncestor(@NonNull final Set heads) { HashMap branches = new HashMap<>(); ArrayList> iterators = new ArrayList<>(); ArrayList livePieces = new ArrayList<>(); @@ -368,7 +368,7 @@ ArrayDeque leastCommonAncestor(@Nonnull final Set ArrayDeque parallelForks = new ArrayDeque<>(); // Tracks the discovered forks in order of encounter Predicate notAHead = new Predicate() { // Filter out pre-existing heads - Collection checkHeads = convertToFastCheckable(heads); + final Collection checkHeads = convertToFastCheckable(heads); @Override public boolean apply(FlowNode input) { return !checkHeads.contains(input); } @@ -439,18 +439,21 @@ ArrayDeque leastCommonAncestor(@Nonnull final Set } } + if (parallelForks.isEmpty()) { + throw new IllegalStateException("No least common ancestor found from " + heads); + } + // If we hit issues with the ordering of blocks by depth, apply a sorting to the parallels by depth return convertForksToBlockStarts(parallelForks); } @Override - protected void setHeads(@Nonnull Collection heads) { + protected void setHeads(@NonNull Collection heads) { if (heads.size() > 1) { for (FlowNode f : heads) { headIds.add(f.getId()); } parallelBlockStartStack = leastCommonAncestor(new LinkedHashSet<>(heads)); - assert parallelBlockStartStack.size() > 0; currentParallelStart = parallelBlockStartStack.pop(); currentParallelStartNode = currentParallelStart.forkStart; myCurrent = currentParallelStart.unvisited.pop(); @@ -566,7 +569,7 @@ public FlowNode next() { @Override @SuppressFBWarnings(value = "DLS_DEAD_LOCAL_STORE", justification = "Function call to modify state, special case where we don't need the returnVal") - protected FlowNode next(@Nonnull FlowNode current, @Nonnull Collection blackList) { + protected FlowNode next(@NonNull FlowNode current, @NonNull Collection blackList) { FlowNode output = null; // First we look at the parents of the current node if present @@ -619,13 +622,13 @@ protected FlowNode next(@Nonnull FlowNode current, @Nonnull Collection return output; } - public static void visitSimpleChunks(@Nonnull Collection heads, @Nonnull Collection blacklist, @Nonnull SimpleChunkVisitor visitor, @Nonnull ChunkFinder finder) { + public static void visitSimpleChunks(@NonNull Collection heads, @NonNull Collection blacklist, @NonNull SimpleChunkVisitor visitor, @NonNull ChunkFinder finder) { ForkScanner scanner = new ForkScanner(); scanner.setup(heads, blacklist); scanner.visitSimpleChunks(visitor, finder); } - public static void visitSimpleChunks(@Nonnull Collection heads, @Nonnull SimpleChunkVisitor visitor, @Nonnull ChunkFinder finder) { + public static void visitSimpleChunks(@NonNull Collection heads, @NonNull SimpleChunkVisitor visitor, @NonNull ChunkFinder finder) { ForkScanner scanner = new ForkScanner(); scanner.setup(heads); scanner.visitSimpleChunks(visitor, finder); @@ -638,7 +641,7 @@ public static void visitSimpleChunks(@Nonnull Collection heads, @Nonnu * not just the last declared branch. 
(See issue JENKINS-38536) */ @CheckForNull - static FlowNode findLastRunningNode(@Nonnull List candidates) { + static FlowNode findLastRunningNode(@NonNull List candidates) { if (candidates.size() == 0) { return null; } else if (candidates.size() == 1) { @@ -679,7 +682,7 @@ List currentParallelHeads() { /** Pulls out firing the callbacks for parallels */ static void fireVisitParallelCallbacks(@CheckForNull FlowNode next, @CheckForNull FlowNode current, @CheckForNull FlowNode prev, - @Nonnull SimpleChunkVisitor visitor, @Nonnull ChunkFinder finder, @Nonnull ForkScanner scanner) { + @NonNull SimpleChunkVisitor visitor, @NonNull ChunkFinder finder, @NonNull ForkScanner scanner) { // Trigger on parallels switch (scanner.currentType) { case NORMAL: @@ -726,8 +729,8 @@ static void fireVisitParallelCallbacks(@CheckForNull FlowNode next, @CheckForNul /** Abstracts out the simpleChunkVisitor callback-triggering logic. * Note that a null value of "prev" is assumed to mean we're the last node. */ @SuppressFBWarnings(value="NP_LOAD_OF_KNOWN_NULL_VALUE", justification = "FindBugs doesn't like passing nulls to a method that can take null") - static void fireVisitChunkCallbacks(@CheckForNull FlowNode next, @Nonnull FlowNode current, @CheckForNull FlowNode prev, - @Nonnull SimpleChunkVisitor visitor, @Nonnull ChunkFinder finder, @Nonnull ForkScanner scanner) { + static void fireVisitChunkCallbacks(@CheckForNull FlowNode next, @NonNull FlowNode current, @CheckForNull FlowNode prev, + @NonNull SimpleChunkVisitor visitor, @NonNull ChunkFinder finder, @NonNull ForkScanner scanner) { boolean boundary = false; if (prev == null && finder.isStartInsideChunk()) { // Last node, need to fire end event to start inside chunk visitor.chunkEnd(current, prev, scanner); @@ -751,7 +754,7 @@ static void fireVisitChunkCallbacks(@CheckForNull FlowNode next, @Nonnull FlowNo } /** Walk through flows */ - public void visitSimpleChunks(@Nonnull SimpleChunkVisitor visitor, @Nonnull ChunkFinder finder) { + public void visitSimpleChunks(@NonNull SimpleChunkVisitor visitor, @NonNull ChunkFinder finder) { FlowNode prev; if (this.currentParallelStart != null) { diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LabelledChunkFinder.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LabelledChunkFinder.java index 7c8e6762..38b90fac 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LabelledChunkFinder.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LabelledChunkFinder.java @@ -5,8 +5,8 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Splits a flow execution into {@link FlowChunk}s whenever you have a label. 
@@ -24,7 +24,7 @@ public boolean isStartInsideChunk() { /** Start is anywhere with a {@link LabelAction} */ @Override - public boolean isChunkStart(@Nonnull FlowNode current, @CheckForNull FlowNode previous) { + public boolean isChunkStart(@NonNull FlowNode current, @CheckForNull FlowNode previous) { LabelAction la = current.getPersistentAction(LabelAction.class); return la != null; } @@ -32,7 +32,7 @@ public boolean isChunkStart(@Nonnull FlowNode current, @CheckForNull FlowNode pr /** End is where the previous node is a chunk start * or this is a {@link BlockEndNode} whose {@link BlockStartNode} has a label action */ @Override - public boolean isChunkEnd(@Nonnull FlowNode current, @CheckForNull FlowNode previous) { + public boolean isChunkEnd(@NonNull FlowNode current, @CheckForNull FlowNode previous) { if (previous == null) { return false; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearBlockHoppingScanner.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearBlockHoppingScanner.java index 1618b748..970a0b54 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearBlockHoppingScanner.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearBlockHoppingScanner.java @@ -28,9 +28,9 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; -import javax.annotation.concurrent.NotThreadSafe; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -70,7 +70,7 @@ public boolean setup(@CheckForNull Collection heads, @CheckForNull Col } @Override - protected void setHeads(@Nonnull Collection heads) { + protected void setHeads(@NonNull Collection heads) { Iterator it = heads.iterator(); if (it.hasNext()) { this.myCurrent = jumpBlockScan(it.next(), myBlackList); @@ -83,7 +83,7 @@ protected void setHeads(@Nonnull Collection heads) { /** Keeps jumping over blocks until we hit the first node preceding a block */ @CheckForNull - protected FlowNode jumpBlockScan(@CheckForNull FlowNode node, @Nonnull Collection blacklistNodes) { + protected FlowNode jumpBlockScan(@CheckForNull FlowNode node, @NonNull Collection blacklistNodes) { FlowNode candidate = node; // Find the first candidate node preceding a block... 
and filtering by blacklist @@ -113,7 +113,7 @@ protected FlowNode jumpBlockScan(@CheckForNull FlowNode node, @Nonnull Collectio } @Override - protected FlowNode next(@Nonnull FlowNode current, @Nonnull Collection blackList) { + protected FlowNode next(@CheckForNull FlowNode current, @NonNull Collection blackList) { if (current == null) { return null; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearScanner.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearScanner.java index 60e1e1e6..ca8ad4db 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearScanner.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/LinearScanner.java @@ -27,8 +27,9 @@ import com.google.common.base.Predicate; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.Nonnull; -import javax.annotation.concurrent.NotThreadSafe; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import net.jcip.annotations.NotThreadSafe; import java.util.Collection; import java.util.Collections; import java.util.Iterator; @@ -46,7 +47,7 @@ *

    Use case: we don't care about parallel branches or know they don't exist, we just want to walk through the top-level blocks. * *
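A minimal sketch of that use case, walking straight back from a single head and keeping only matching nodes via the filteredNodes(FlowNode, Predicate) overload this class points callers at. The helper name and the 'sh' step name are illustrative, not part of this change.

import java.util.List;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.graphanalysis.LinearScanner;
import org.jenkinsci.plugins.workflow.graphanalysis.NodeStepTypePredicate;

/** Hypothetical helper: walk linearly back from one head, collecting 'sh' step nodes. */
class ShellStepLookup {
    static List<FlowNode> shellSteps(FlowNode head) {
        LinearScanner scanner = new LinearScanner();
        // Pass one head only: at a parallel fork the scanner simply follows a single branch.
        return scanner.filteredNodes(head, new NodeStepTypePredicate("sh"));
    }
}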

    This is the fastest and simplest way to walk a flow, because you only care about a single node at a time. - * Nuance: where there are multiple parent nodes (in a parallel block), and one is blacklisted, we'll find the first non-blacklisted one. + * Nuance: where there are multiple parent nodes (in a parallel block), and one is denylisted, we'll find the first non-denylisted one. * @author Sam Van Oort */ @NotThreadSafe @@ -63,10 +64,10 @@ protected void reset() { /** * {@inheritDoc} - * @param heads Head nodes that have been filtered against blackList. Do not pass multiple heads. + * @param heads Head nodes that have been filtered against denyList. Do not pass multiple heads. */ @Override - protected void setHeads(@Nonnull Collection heads) { + protected void setHeads(@NonNull Collection heads) { Iterator it = heads.iterator(); if (it.hasNext()) { this.myCurrent = it.next(); @@ -78,7 +79,7 @@ protected void setHeads(@Nonnull Collection heads) { } @Override - protected FlowNode next(FlowNode current, @Nonnull Collection blackList) { + protected FlowNode next(@CheckForNull FlowNode current, @NonNull Collection blackList) { if (current == null) { return null; } @@ -98,8 +99,9 @@ protected FlowNode next(FlowNode current, @Nonnull Collection blackLis * @deprecated prefer {@link #filteredNodes(FlowNode, Predicate)} */ @Deprecated + @NonNull @Override - public List filteredNodes(Collection heads, Predicate matchPredicate) { + public List filteredNodes(Collection heads, @NonNull Predicate matchPredicate) { return super.filteredNodes(heads, matchPredicate); } @@ -109,6 +111,7 @@ public List filteredNodes(Collection heads, PredicateDo not pass multiple heads. */ + @NonNull @Override public List filteredNodes(Collection heads, Collection blackList, Predicate matchCondition) { return super.filteredNodes(heads, blackList, matchCondition); @@ -120,7 +123,7 @@ public List filteredNodes(Collection heads, Collection heads, Predicate matchPredicate) { + public FlowNode findFirstMatch(Collection heads, @NonNull Predicate matchPredicate) { return super.findFirstMatch(heads, matchPredicate); } @@ -142,7 +145,7 @@ public FlowNode findFirstMatch(Collection heads, Collection * @param heads Do not pass multiple heads. */ @Override - public void visitAll(Collection heads, FlowNodeVisitor visitor) { + public void visitAll(Collection heads, @NonNull FlowNodeVisitor visitor) { super.visitAll(heads, visitor); } @@ -153,7 +156,7 @@ public void visitAll(Collection heads, FlowNodeVisitor visitor) { * @param heads Nodes to start walking the DAG backwards from. Do not pass multiple heads. 
*/ @Override - public void visitAll(Collection heads, Collection blackList, FlowNodeVisitor visitor) { + public void visitAll(Collection heads, Collection blackList, @NonNull FlowNodeVisitor visitor) { super.visitAll(heads, blackList, visitor); } @@ -163,7 +166,7 @@ public void visitAll(Collection heads, Collection blackList, */ @Deprecated @Override - public FlowNode findFirstMatch(FlowExecution exec, Predicate matchPredicate) { + public FlowNode findFirstMatch(FlowExecution exec, @NonNull Predicate matchPredicate) { return super.findFirstMatch(exec, matchPredicate); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunk.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunk.java index 60a4b128..9f65e888 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunk.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunk.java @@ -26,8 +26,8 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * FlowChunk that holds direct references to the {@link FlowNode} instances and context info @@ -41,7 +41,7 @@ public class MemoryFlowChunk implements FlowChunkWithContext { protected FlowNode nodeAfter = null; private long pauseTimeMillis = 0; - public MemoryFlowChunk(@CheckForNull FlowNode before, @Nonnull FlowNode firstNode, @Nonnull FlowNode lastNode, @CheckForNull FlowNode nodeAfter) { + public MemoryFlowChunk(@CheckForNull FlowNode before, @NonNull FlowNode firstNode, @NonNull FlowNode lastNode, @CheckForNull FlowNode nodeAfter) { this.setNodeBefore(before); this.setFirstNode(firstNode); this.setLastNode(lastNode); @@ -52,6 +52,7 @@ public MemoryFlowChunk() { } + @NonNull @Override public FlowNode getFirstNode() { return firstNode; @@ -61,7 +62,7 @@ public void setFirstNode(FlowNode firstNode) { this.firstNode = firstNode; } - + @NonNull @Override public FlowNode getLastNode() { return lastNode; diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepNamePredicate.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepNamePredicate.java index 9890a8e1..f74fa5e9 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepNamePredicate.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepNamePredicate.java @@ -30,8 +30,8 @@ import org.jenkinsci.plugins.workflow.graph.StepNode; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -42,7 +42,7 @@ public final class NodeStepNamePredicate implements Predicate { String descriptorId; - public NodeStepNamePredicate(@Nonnull String descriptorId) { + public NodeStepNamePredicate(@NonNull String descriptorId) { this.descriptorId = descriptorId; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepTypePredicate.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepTypePredicate.java index c7ae8d7b..c9a0dd52 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepTypePredicate.java +++ 
b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/NodeStepTypePredicate.java @@ -30,8 +30,8 @@ import org.jenkinsci.plugins.workflow.graph.StepNode; import org.jenkinsci.plugins.workflow.steps.StepDescriptor; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; @@ -39,12 +39,12 @@ public final class NodeStepTypePredicate implements Predicate { StepDescriptor stepDescriptor; - public NodeStepTypePredicate(@Nonnull StepDescriptor descriptorType) { + public NodeStepTypePredicate(@NonNull StepDescriptor descriptorType) { stepDescriptor = descriptorType; } /** Create a filter predicate based on the step name */ - public NodeStepTypePredicate(@Nonnull String functionName) { + public NodeStepTypePredicate(@NonNull String functionName) { stepDescriptor = StepDescriptor.byFunctionName(functionName); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelFlowChunk.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelFlowChunk.java index 024d7a5c..7ab75b8e 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelFlowChunk.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelFlowChunk.java @@ -1,6 +1,6 @@ package org.jenkinsci.plugins.workflow.graphanalysis; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Map; /** @@ -9,9 +9,8 @@ public interface ParallelFlowChunk extends FlowChunk { /** Returns the branches of a parallel flow chunk, mapped by branch name and parallel branch block */ - @Nonnull + @NonNull Map getBranches(); - @Nonnull - void setBranch(@Nonnull String branchName, @Nonnull ChunkType branchBlock); + void setBranch(@NonNull String branchName, @NonNull ChunkType branchBlock); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelMemoryFlowChunk.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelMemoryFlowChunk.java index 2772d329..9026843f 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelMemoryFlowChunk.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/ParallelMemoryFlowChunk.java @@ -26,8 +26,8 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; @@ -39,23 +39,23 @@ public class ParallelMemoryFlowChunk extends MemoryFlowChunk implements ParallelFlowChunk { // LinkedHashMap to preserve insert order - private LinkedHashMap branches = new LinkedHashMap<>(); + private final LinkedHashMap branches = new LinkedHashMap<>(); - public ParallelMemoryFlowChunk(@Nonnull FlowNode firstNode, @Nonnull FlowNode lastNode) { + public ParallelMemoryFlowChunk(@NonNull FlowNode firstNode, @NonNull FlowNode lastNode) { super (null,firstNode, lastNode, null); } - public ParallelMemoryFlowChunk(@CheckForNull FlowNode nodeBefore, @Nonnull FlowNode firstNode, @Nonnull FlowNode lastNode, @CheckForNull FlowNode nodeAfter) { + public ParallelMemoryFlowChunk(@CheckForNull FlowNode nodeBefore, @NonNull FlowNode firstNode, @NonNull FlowNode lastNode, @CheckForNull FlowNode nodeAfter) { super 
(nodeBefore,firstNode, lastNode, nodeAfter); } @Override - public void setBranch(@Nonnull String branchName, @Nonnull MemoryFlowChunk branchBlock) { + public void setBranch(@NonNull String branchName, @NonNull MemoryFlowChunk branchBlock) { branches.put(branchName, branchBlock); } @Override - @Nonnull + @NonNull public Map getBranches() { return Collections.unmodifiableMap(branches); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/SimpleChunkVisitor.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/SimpleChunkVisitor.java index 0e50dd1f..714b1f0b 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/SimpleChunkVisitor.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/SimpleChunkVisitor.java @@ -27,18 +27,18 @@ import org.jenkinsci.plugins.workflow.graph.BlockStartNode; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * This visitor's callbacks are invoked as we walk through a pipeline flow graph, and it splits it into chunks. - *

 * A {@link ForkScanner#visitSimpleChunks(SimpleChunkVisitor, ChunkFinder)} creates these FlowChunks using a {@link ChunkFinder} to define the chunk boundaries.
 *
 * We walk through the {@link FlowNode}s in reverse order from end to start, so end callbacks are invoked before
 * their corresponding start callbacks.
 *
 * Callback types
- * There are two kinds of callbacks - chunk callbacks, and parallel structure callbacks
+ * There are two kinds of callbacks - chunk callbacks, and parallel structure callbacks.
 * Chunk Callbacks:
 *   • {@link #chunkStart(FlowNode, FlowNode, ForkScanner)} - detected the start of a chunk beginning with a node
@@ -46,7 +46,7 @@
 *   • {@link #atomNode(FlowNode, FlowNode, FlowNode, ForkScanner)} - most nodes, which aren't boundaries of chunks
 *
 * Chunk callback rules:
 *   • For a single node, it may have EITHER OR BOTH chunkStart and chunkEnd events
@@ -55,7 +55,7 @@
 *   • Every node that doesn't get a startChunk/endChunk callback gets an atomNode callback.
 *   • You cannot have an atomNode callback AND a start/end for the same flownode (application of the above).
 *
- * Parallel Structure Callbacks: Zero, One, or (in niche cases) several different ones may be invoked for any given FlowNode
+ * Parallel Structure Callbacks: Zero, One, or (in niche cases) several different ones may be invoked for any given FlowNode.
 * These are used to provide awareness of parallel/branching structures if they need special handling.
 *   • {@link #parallelStart(FlowNode, FlowNode, ForkScanner)}
@@ -63,12 +63,12 @@
 *   • {@link #parallelBranchStart(FlowNode, FlowNode, ForkScanner)}
 *   • {@link #parallelBranchEnd(FlowNode, FlowNode, ForkScanner)}
 * The cases where a node triggers multiple callbacks are where it is one of several forked branches of an incomplete parallel
 * block. In this case it can be a parallelBranchEnd, also potentially a parallelEnd, plus whatever role that node might normally
 * have (such as the start of another parallel).
 *
 * Implementations get to decide how to use and handle chunks, and should be stateful.
 * At a minimum they should handle:
 *   • Cases where there is no enclosing chunk (no start/end found, or outside a chunk)
 *   • Cases where there is no chunk end to match the start, because we haven't finished running a block
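To make the statefulness concrete, below is a sketch of a visitor that pairs chunk ends with their starts and tolerates a missing end (the still-running case above). Class and field names are illustrative; it assumes flat, sequential chunks such as those produced by LabelledChunkFinder, since traversal runs from the end of the flow back toward the start.

import java.util.ArrayList;
import java.util.List;
import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.graphanalysis.ForkScanner;
import org.jenkinsci.plugins.workflow.graphanalysis.SimpleChunkVisitor;

/** Illustrative stateful visitor: records "startId .. endId" per chunk, flagging unfinished ones. */
class ChunkBoundaryVisitor implements SimpleChunkVisitor {
    private FlowNode pendingEnd; // end already seen (we walk backwards), start not reached yet
    final List<String> summaries = new ArrayList<>();

    @Override public void chunkEnd(@NonNull FlowNode endNode, @CheckForNull FlowNode afterChunk, @NonNull ForkScanner scanner) {
        pendingEnd = endNode;
    }

    @Override public void chunkStart(@NonNull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @NonNull ForkScanner scanner) {
        summaries.add(startNode.getId() + (pendingEnd != null ? " .. " + pendingEnd.getId() : " (no end yet, still running)"));
        pendingEnd = null;
    }

    // The remaining callbacks are not needed for this simple, non-parallel-aware summary.
    @Override public void atomNode(@CheckForNull FlowNode before, @NonNull FlowNode atomNode, @CheckForNull FlowNode after, @NonNull ForkScanner scan) {}
    @Override public void parallelStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchNode, @NonNull ForkScanner scanner) {}
    @Override public void parallelEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode parallelEndNode, @NonNull ForkScanner scanner) {}
    @Override public void parallelBranchStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchStartNode, @NonNull ForkScanner scanner) {}
    @Override public void parallelBranchEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchEndNode, @NonNull ForkScanner scanner) {}
}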
    • @@ -85,7 +85,7 @@ public interface SimpleChunkVisitor { * @param beforeBlock First node before chunk (null if none exist) * @param scanner Forkscanner used (for state tracking) */ - void chunkStart(@Nonnull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @Nonnull ForkScanner scanner); + void chunkStart(@NonNull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @NonNull ForkScanner scanner); /** * Called when hitting the end of a chunk. @@ -93,7 +93,7 @@ public interface SimpleChunkVisitor { * @param afterChunk Node after chunk (null if we are on the last node) * @param scanner Forkscanner used (for state tracking) */ - void chunkEnd(@Nonnull FlowNode endNode, @CheckForNull FlowNode afterChunk, @Nonnull ForkScanner scanner); + void chunkEnd(@NonNull FlowNode endNode, @CheckForNull FlowNode afterChunk, @NonNull ForkScanner scanner); /** * Notifies that we've hit the start of a parallel block (the point where it branches out). @@ -101,7 +101,7 @@ public interface SimpleChunkVisitor { * @param branchNode {@link org.jenkinsci.plugins.workflow.graph.BlockStartNode} for one of the branches (it will be labelled) * @param scanner ForkScanner used */ - void parallelStart(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchNode, @Nonnull ForkScanner scanner); + void parallelStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchNode, @NonNull ForkScanner scanner); /** * Notifies that we've seen the end of a parallel block @@ -109,7 +109,7 @@ public interface SimpleChunkVisitor { * @param parallelEndNode Last node of parallel ({@link BlockEndNode}) * @param scanner ForkScanner used */ - void parallelEnd(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode parallelEndNode, @Nonnull ForkScanner scanner); + void parallelEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode parallelEndNode, @NonNull ForkScanner scanner); /** * Hit the start of a parallel branch @@ -117,7 +117,7 @@ public interface SimpleChunkVisitor { * @param branchStartNode BlockStartNode beginning the branch (this will have the ThreadNameAction giving its name) * @param scanner ForkScanner used */ - void parallelBranchStart(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchStartNode, @Nonnull ForkScanner scanner); + void parallelBranchStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchStartNode, @NonNull ForkScanner scanner); /** * Hit the end start of a parallel branch @@ -126,7 +126,7 @@ public interface SimpleChunkVisitor { * @param branchEndNode Final node of the branch (may be BlockEndNode if done, otherwise just the last one executed) * @param scanner ForkScanner used */ - void parallelBranchEnd(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchEndNode, @Nonnull ForkScanner scanner); + void parallelBranchEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchEndNode, @NonNull ForkScanner scanner); /** * Called for a flownode neither start nor end. 
@@ -137,5 +137,5 @@ public interface SimpleChunkVisitor { * @param after Node after the current * @param scan Reference to our forkscanner, if we want to poke at the state within */ - void atomNode(@CheckForNull FlowNode before, @Nonnull FlowNode atomNode, @CheckForNull FlowNode after, @Nonnull ForkScanner scan); + void atomNode(@CheckForNull FlowNode before, @NonNull FlowNode atomNode, @CheckForNull FlowNode after, @NonNull ForkScanner scan); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/StandardChunkVisitor.java b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/StandardChunkVisitor.java index 2e9495bf..e1ab1ea3 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/StandardChunkVisitor.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/graphanalysis/StandardChunkVisitor.java @@ -2,8 +2,8 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Simple handler for linear {@link FlowChunk}s (basic stages, etc), and designed to be extended. @@ -21,11 +21,11 @@ public class StandardChunkVisitor implements SimpleChunkVisitor { /** Override me to do something once the chunk is finished (such as add it to a list). * Note: the chunk will be mutated directly, so you need to copy it if you want to do something. */ - protected void handleChunkDone(@Nonnull MemoryFlowChunk chunk) { + protected void handleChunkDone(@NonNull MemoryFlowChunk chunk) { // NO-OP initially } - protected void resetChunk(@Nonnull MemoryFlowChunk chunk) { + protected void resetChunk(@NonNull MemoryFlowChunk chunk) { chunk.setFirstNode(null); chunk.setLastNode(null); chunk.setNodeBefore(null); @@ -34,7 +34,7 @@ protected void resetChunk(@Nonnull MemoryFlowChunk chunk) { } @Override - public void chunkStart(@Nonnull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @Nonnull ForkScanner scanner) { + public void chunkStart(@NonNull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @NonNull ForkScanner scanner) { chunk.setNodeBefore(beforeBlock); chunk.setFirstNode(startNode); handleChunkDone(chunk); @@ -42,24 +42,24 @@ public void chunkStart(@Nonnull FlowNode startNode, @CheckForNull FlowNode befor } @Override - public void chunkEnd(@Nonnull FlowNode endNode, @CheckForNull FlowNode afterChunk, @Nonnull ForkScanner scanner) { + public void chunkEnd(@NonNull FlowNode endNode, @CheckForNull FlowNode afterChunk, @NonNull ForkScanner scanner) { chunk.setLastNode(endNode); chunk.setNodeAfter(afterChunk); } @Override - public void parallelStart(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchNode, @Nonnull ForkScanner scanner) {} + public void parallelStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchNode, @NonNull ForkScanner scanner) {} @Override - public void parallelEnd(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode parallelEndNode, @Nonnull ForkScanner scanner) {} + public void parallelEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode parallelEndNode, @NonNull ForkScanner scanner) {} @Override - public void parallelBranchStart(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchStartNode, @Nonnull ForkScanner scanner) {} + public void parallelBranchStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchStartNode, @NonNull ForkScanner scanner) {} @Override - public void parallelBranchEnd(@Nonnull FlowNode 
parallelStartNode, @Nonnull FlowNode branchEndNode, @Nonnull ForkScanner scanner) {} + public void parallelBranchEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchEndNode, @NonNull ForkScanner scanner) {} /** Extend me to do something with nodes inside a chunk */ @Override - public void atomNode(@CheckForNull FlowNode before, @Nonnull FlowNode atomNode, @CheckForNull FlowNode after, @Nonnull ForkScanner scan) {} + public void atomNode(@CheckForNull FlowNode before, @NonNull FlowNode atomNode, @CheckForNull FlowNode after, @NonNull ForkScanner scan) {} } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/BrokenLogStorage.java b/src/main/java/org/jenkinsci/plugins/workflow/log/BrokenLogStorage.java index 3b600903..d909dda1 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/BrokenLogStorage.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/BrokenLogStorage.java @@ -24,6 +24,7 @@ package org.jenkinsci.plugins.workflow.log; +import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Functions; import hudson.console.AnnotatedLargeText; import hudson.model.BuildListener; @@ -47,20 +48,24 @@ public final class BrokenLogStorage implements LogStorage { public BrokenLogStorage(Throwable x) { this.x = x; } - - @Override public BuildListener overallListener() throws IOException, InterruptedException { + + @NonNull + @Override public BuildListener overallListener() throws IOException { throw new IOException(x); } - - @Override public TaskListener nodeListener(FlowNode node) throws IOException, InterruptedException { + + @NonNull + @Override public TaskListener nodeListener(@NonNull FlowNode node) throws IOException { throw new IOException(x); } - - @Override public AnnotatedLargeText overallLog(FlowExecutionOwner.Executable build, boolean complete) { + + @NonNull + @Override public AnnotatedLargeText overallLog(@NonNull FlowExecutionOwner.Executable build, boolean complete) { return new BrokenAnnotatedLargeText<>(); } - - @Override public AnnotatedLargeText stepLog(FlowNode node, boolean complete) { + + @NonNull + @Override public AnnotatedLargeText stepLog(@NonNull FlowNode node, boolean complete) { return new BrokenAnnotatedLargeText<>(); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/BufferedBuildListener.java b/src/main/java/org/jenkinsci/plugins/workflow/log/BufferedBuildListener.java index 5fc2f817..85316ff8 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/BufferedBuildListener.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/BufferedBuildListener.java @@ -24,6 +24,7 @@ package org.jenkinsci.plugins.workflow.log; +import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.CloseProofOutputStream; import hudson.model.BuildListener; @@ -49,7 +50,8 @@ final class BufferedBuildListener implements BuildListener, Closeable, Serializa this.out = out; ps = new PrintStream(out, false, "UTF-8"); } - + + @NonNull @Override public PrintStream getLogger() { return ps; } @@ -67,7 +69,7 @@ private static final class Replacement implements SerializableOnlyOverRemoting { private static final long serialVersionUID = 1; private final RemoteOutputStream ros; - private final DelayBufferedOutputStream.Tuning tuning = DelayBufferedOutputStream.Tuning.DEFAULT; // load defaults on master + private final DelayBufferedOutputStream.Tuning tuning = DelayBufferedOutputStream.Tuning.DEFAULT; // load defaults on controller Replacement(BufferedBuildListener cbl) { this.ros = new 
RemoteOutputStream(new CloseProofOutputStream(cbl.out)); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/ConsoleAnnotators.java b/src/main/java/org/jenkinsci/plugins/workflow/log/ConsoleAnnotators.java index 5d5869c3..f25d09f0 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/ConsoleAnnotators.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/ConsoleAnnotators.java @@ -76,7 +76,7 @@ public static ConsoleAnnotator createAnnotator(T context) throws IOExcept ClassFilter.DEFAULT)) { long timestamp = ois.readLong(); if (TimeUnit.HOURS.toMillis(1) > abs(System.currentTimeMillis() - timestamp)) { - @SuppressWarnings("unchecked") ConsoleAnnotator annotator = (ConsoleAnnotator) ois.readObject(); + @SuppressWarnings("unchecked") ConsoleAnnotator annotator = (ConsoleAnnotator) ois.readObject(); return annotator; } } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/DelayBufferedOutputStream.java b/src/main/java/org/jenkinsci/plugins/workflow/log/DelayBufferedOutputStream.java index 541e8782..ff366e78 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/DelayBufferedOutputStream.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/DelayBufferedOutputStream.java @@ -33,6 +33,8 @@ import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; + +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.util.Timer; import org.jenkinsci.remoting.SerializableOnlyOverRemoting; @@ -121,7 +123,7 @@ private static final class FlushControlledOutputStream extends FilterOutputStrea super(out); } - @Override public void write(byte[] b, int off, int len) throws IOException { + @Override public void write(@NonNull byte[] b, int off, int len) throws IOException { out.write(b, off, len); // super method writes one byte at a time! 
} diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/FileLogStorage.java b/src/main/java/org/jenkinsci/plugins/workflow/log/FileLogStorage.java index f2789a71..afebd958 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/FileLogStorage.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/FileLogStorage.java @@ -24,6 +24,7 @@ package org.jenkinsci.plugins.workflow.log; +import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.console.AnnotatedLargeText; import hudson.console.ConsoleAnnotationOutputStream; @@ -111,11 +112,13 @@ private synchronized void open() throws IOException { } } - @Override public BuildListener overallListener() throws IOException, InterruptedException { + @NonNull + @Override public BuildListener overallListener() throws IOException { return new BufferedBuildListener(new IndexOutputStream(null)); } - @Override public TaskListener nodeListener(FlowNode node) throws IOException, InterruptedException { + @NonNull + @Override public TaskListener nodeListener(@NonNull FlowNode node) throws IOException { return new BufferedBuildListener(new IndexOutputStream(node.getId())); } @@ -153,14 +156,14 @@ private final class IndexOutputStream extends OutputStream { } } - @Override public void write(byte[] b) throws IOException { + @Override public void write(@NonNull byte[] b) throws IOException { synchronized (FileLogStorage.this) { checkId(id); bos.write(b); } } - @Override public void write(byte[] b, int off, int len) throws IOException { + @Override public void write(@NonNull byte[] b, int off, int len) throws IOException { synchronized (FileLogStorage.this) { checkId(id); bos.write(b, off, len); @@ -194,7 +197,8 @@ private void maybeFlush() { } } - @Override public AnnotatedLargeText overallLog(FlowExecutionOwner.Executable build, boolean complete) { + @NonNull + @Override public AnnotatedLargeText overallLog(@NonNull FlowExecutionOwner.Executable build, boolean complete) { maybeFlush(); return new AnnotatedLargeText(log, StandardCharsets.UTF_8, complete, build) { @Override public long writeHtmlTo(long start, Writer w) throws IOException { @@ -252,7 +256,8 @@ private void maybeFlush() { }; } - @Override public AnnotatedLargeText stepLog(FlowNode node, boolean complete) { + @NonNull + @Override public AnnotatedLargeText stepLog(@NonNull FlowNode node, boolean complete) { maybeFlush(); String id = node.getId(); try (ByteBuffer buf = new ByteBuffer(); @@ -323,7 +328,8 @@ private void maybeFlush() { } @Deprecated - @Override public File getLogFile(FlowExecutionOwner.Executable build, boolean complete) { + @NonNull + @Override public File getLogFile(@NonNull FlowExecutionOwner.Executable build, boolean complete) { return log; } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/GCFlushedOutputStream.java b/src/main/java/org/jenkinsci/plugins/workflow/log/GCFlushedOutputStream.java index c794c60b..7e47b7ab 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/GCFlushedOutputStream.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/GCFlushedOutputStream.java @@ -33,6 +33,8 @@ import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; + +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.util.Timer; /** @@ -48,7 +50,7 @@ final class GCFlushedOutputStream extends FilterOutputStream { FlushRef.register(this, out); } - @Override public void write(byte[] b, int off, int len) throws IOException { + @Override public 
void write(@NonNull byte[] b, int off, int len) throws IOException { out.write(b, off, len); // super method is surprising } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorage.java b/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorage.java index 87e5ed50..29b7ca07 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorage.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorage.java @@ -37,7 +37,7 @@ import java.io.OutputStream; import java.util.logging.Level; import java.util.logging.Logger; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; import org.jenkinsci.plugins.workflow.actions.LogAction; import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner; import org.jenkinsci.plugins.workflow.graph.FlowNode; @@ -62,7 +62,7 @@ public interface LogStorage { * @return a (remotable) build listener; do not bother overriding anything except {@link TaskListener#getLogger} * @see FlowExecutionOwner#getListener */ - @Nonnull BuildListener overallListener() throws IOException, InterruptedException; + @NonNull BuildListener overallListener() throws IOException, InterruptedException; /** * Provides an alternate way of emitting output from a node (such as a step). @@ -73,7 +73,7 @@ public interface LogStorage { * @return a (remotable) task listener; do not bother overriding anything except {@link TaskListener#getLogger} * @see StepContext#get */ - @Nonnull TaskListener nodeListener(@Nonnull FlowNode node) throws IOException, InterruptedException; + @NonNull TaskListener nodeListener(@NonNull FlowNode node) throws IOException, InterruptedException; /** * Provides an alternate way of retrieving output from a build. @@ -84,14 +84,14 @@ public interface LogStorage { * so implementations should be sure to retrieve final log lines * @return a log */ - @Nonnull AnnotatedLargeText overallLog(@Nonnull FlowExecutionOwner.Executable build, boolean complete); + @NonNull AnnotatedLargeText overallLog(@NonNull FlowExecutionOwner.Executable build, boolean complete); /** * Introduces an HTML block with a {@code pipeline-node-} CSS class based on {@link FlowNode#getId}. * @see #endStep * @see #overallLog */ - static @Nonnull String startStep(@Nonnull String id) { + static @NonNull String startStep(@NonNull String id) { return ""; } @@ -100,7 +100,7 @@ public interface LogStorage { * @see #startStep * @see #overallLog */ - static @Nonnull String endStep() { + static @NonNull String endStep() { return ""; } @@ -112,7 +112,7 @@ public interface LogStorage { * @return a log for this just this node * @see LogAction */ - @Nonnull AnnotatedLargeText stepLog(@Nonnull FlowNode node, boolean complete); + @NonNull AnnotatedLargeText stepLog(@NonNull FlowNode node, boolean complete); /** * Provide a file containing the log text. 
@@ -124,7 +124,7 @@ public interface LogStorage { */ @SuppressFBWarnings(value = "REC_CATCH_EXCEPTION", justification = "silly rule") @Deprecated - default @Nonnull File getLogFile(@Nonnull FlowExecutionOwner.Executable build, boolean complete) { + default @NonNull File getLogFile(@NonNull FlowExecutionOwner.Executable build, boolean complete) { try { AnnotatedLargeText logText = overallLog(build, complete); FlowExecutionOwner owner = build.asFlowExecutionOwner(); @@ -145,7 +145,7 @@ public interface LogStorage { } catch (Exception x) { Logger.getLogger(LogStorage.class.getName()).log(Level.WARNING, null, x); if (build instanceof Run) { - return new File(((Run) build).getRootDir(), "log"); + return new File(((Run) build).getRootDir(), "log"); } else { return new File("broken.log"); // not much we can do } @@ -158,7 +158,7 @@ public interface LogStorage { * @return the mechanism for handling this build, including any necessary fallback * @see LogStorageFactory */ - static @Nonnull LogStorage of(@Nonnull FlowExecutionOwner b) { + static @NonNull LogStorage of(@NonNull FlowExecutionOwner b) { try { for (LogStorageFactory factory : ExtensionList.lookup(LogStorageFactory.class)) { LogStorage storage = factory.forBuild(b); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorageFactory.java b/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorageFactory.java index f5382836..4b3f4c6d 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorageFactory.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/LogStorageFactory.java @@ -25,8 +25,8 @@ package org.jenkinsci.plugins.workflow.log; import hudson.ExtensionPoint; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.Beta; @@ -42,6 +42,6 @@ public interface LogStorageFactory extends ExtensionPoint { * @param b a build about to start * @return a mechanism for handling this build, or null to fall back to the next implementation or the default */ - @CheckForNull LogStorage forBuild(@Nonnull FlowExecutionOwner b); + @CheckForNull LogStorage forBuild(@NonNull FlowExecutionOwner b); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecorator.java b/src/main/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecorator.java index c90cd53b..1f167b00 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecorator.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecorator.java @@ -46,14 +46,16 @@ import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; +import edu.umd.cs.findbugs.annotations.Nullable; import jenkins.util.BuildListenerAdapter; import jenkins.util.JenkinsJVM; import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner; import org.jenkinsci.plugins.workflow.steps.BodyInvoker; import org.jenkinsci.plugins.workflow.steps.StepContext; +import org.kohsuke.accmod.Restricted; +import org.kohsuke.accmod.restrictions.Beta; /** * A way of decorating output from a {@link TaskListener}. 
@@ -62,7 +64,7 @@ * using {@link #merge} to pick up any earlier decorator in {@link StepContext#get}. *

      Expected to be serializable either locally or over Remoting, * so an implementation of {@link #decorate} cannot assume that {@link JenkinsJVM#isJenkinsJVM}. - * Any master-side configuration should thus be saved into instance fields when the decorator is constructed. + * Any controller-side configuration should thus be saved into instance fields when the decorator is constructed. * @see JENKINS-45693 */ public abstract class TaskListenerDecorator implements /* TODO Remotable */ Serializable { @@ -77,7 +79,7 @@ public abstract class TaskListenerDecorator implements /* TODO Remotable */ Seri * @param logger a base logger * @return a possibly patched result */ - public abstract @Nonnull OutputStream decorate(@Nonnull OutputStream logger) throws IOException, InterruptedException; + public abstract @NonNull OutputStream decorate(@NonNull OutputStream logger) throws IOException, InterruptedException; /** * Merges two decorators. @@ -129,8 +131,17 @@ public interface Factory extends ExtensionPoint { * @param owner a build * @return a decorator, optionally */ - @CheckForNull TaskListenerDecorator of(@Nonnull FlowExecutionOwner owner); + @CheckForNull TaskListenerDecorator of(@NonNull FlowExecutionOwner owner); + /** + * + * @return boolean, false means to apply step decorators first, then TaskListenerDecorator, true means otherwise + * @see #apply(TaskListener, FlowExecutionOwner, TaskListenerDecorator) + */ + @Restricted(Beta.class) + default boolean isAppliedBeforeMainDecorator(){ + return false; + } } /** @@ -143,13 +154,15 @@ public interface Factory extends ExtensionPoint { * @param mainDecorator an additional contextual decorator to apply, if any * @return a possibly wrapped {@code listener} */ - public static BuildListener apply(@Nonnull TaskListener listener, @Nonnull FlowExecutionOwner owner, @CheckForNull TaskListenerDecorator mainDecorator) { + public static BuildListener apply(@NonNull TaskListener listener, @NonNull FlowExecutionOwner owner, @CheckForNull TaskListenerDecorator mainDecorator) { JenkinsJVM.checkJenkinsJVM(); + List decoratorFactories = ExtensionList.lookup(TaskListenerDecorator.Factory.class); List decorators = Stream.concat( - ExtensionList.lookup(TaskListenerDecorator.Factory.class).stream().map(f -> f.of(owner)), - Stream.of(mainDecorator)). - filter(Objects::nonNull). - collect(Collectors.toCollection(ArrayList::new)); + decoratorFactories.stream().filter(f -> !f.isAppliedBeforeMainDecorator()).map(f -> f.of(owner)), + Stream.concat(Stream.of(mainDecorator), + decoratorFactories.stream().filter(f -> f.isAppliedBeforeMainDecorator()).map(f -> f.of(owner)))). + filter(Objects::nonNull). 
+ collect(Collectors.toCollection(ArrayList::new)); if (decorators.isEmpty()) { return CloseableTaskListener.of(BuildListenerAdapter.wrap(listener), listener); } else { @@ -162,15 +175,16 @@ private static class MergedTaskListenerDecorator extends TaskListenerDecorator { private static final long serialVersionUID = 1; - private final @Nonnull TaskListenerDecorator original; - private final @Nonnull TaskListenerDecorator subsequent; + private final @NonNull TaskListenerDecorator original; + private final @NonNull TaskListenerDecorator subsequent; - MergedTaskListenerDecorator(TaskListenerDecorator original, TaskListenerDecorator subsequent) { + MergedTaskListenerDecorator(@NonNull TaskListenerDecorator original, @NonNull TaskListenerDecorator subsequent) { this.original = original; this.subsequent = subsequent; } - - @Override public OutputStream decorate(OutputStream logger) throws IOException, InterruptedException { + + @NonNull + @Override public OutputStream decorate(@NonNull OutputStream logger) throws IOException, InterruptedException { // TODO BodyInvoker.MergedFilter probably has these backwards return original.decorate(subsequent.decorate(logger)); } @@ -186,16 +200,17 @@ private static class ConsoleLogFilterAdapter extends TaskListenerDecorator { private static final long serialVersionUID = 1; @SuppressFBWarnings(value = "SE_BAD_FIELD", justification = "Explicitly checking for serializability.") - private final @Nonnull ConsoleLogFilter filter; + private final @NonNull ConsoleLogFilter filter; - ConsoleLogFilterAdapter(ConsoleLogFilter filter) { + ConsoleLogFilterAdapter(@NonNull ConsoleLogFilter filter) { assert filter instanceof Serializable; this.filter = filter; } + @NonNull @SuppressWarnings("deprecation") // the compatibility code in ConsoleLogFilter fails to delegate to the old overload when given a null argument - @Override public OutputStream decorate(OutputStream logger) throws IOException, InterruptedException { - return filter.decorateLogger((AbstractBuild) null, logger); + @Override public OutputStream decorate(@NonNull OutputStream logger) throws IOException, InterruptedException { + return filter.decorateLogger((AbstractBuild) null, logger); } @Override public String toString() { @@ -212,23 +227,24 @@ private static final class DecoratedTaskListener implements BuildListener { * The listener we are delegating to, which was expected to be remotable. * Note that we ignore all of its methods other than {@link TaskListener#getLogger}. */ - private final @Nonnull TaskListener delegate; + private final @NonNull TaskListener delegate; /** * A (nonempty) list of decorators we delegate to. * They are applied in reverse order, so the first one has the final say in what gets printed. 
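To make the new ordering hook concrete: a factory-provided decorator is combined with the step-level (main) decorator in apply above, and, as the comment just before this notes, decorators are applied in reverse order. The sketch below is illustrative only and not part of this change; the class names are invented, and isAppliedBeforeMainDecorator is @Restricted(Beta.class), so downstream use should be treated as experimental.

import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.Extension;
import java.io.IOException;
import java.io.OutputStream;
import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
import org.jenkinsci.plugins.workflow.log.TaskListenerDecorator;

/** Hypothetical pass-through decorator; a real one would wrap the stream, e.g. to mask secrets. */
public class PassThroughDecorator extends TaskListenerDecorator {
    private static final long serialVersionUID = 1;

    @NonNull
    @Override
    public OutputStream decorate(@NonNull OutputStream logger) throws IOException, InterruptedException {
        return logger; // no-op: simply hands the stream back
    }

    /** Hypothetical factory opting in to the new placement relative to the main decorator. */
    @Extension
    public static class FactoryImpl implements TaskListenerDecorator.Factory {
        @CheckForNull
        @Override
        public TaskListenerDecorator of(@NonNull FlowExecutionOwner owner) {
            return new PassThroughDecorator();
        }

        @Override
        public boolean isAppliedBeforeMainDecorator() {
            return true; // place this decorator on the other side of the main (contextual) decorator
        }
    }
}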
*/ - private final @Nonnull List decorators; + private final @NonNull List decorators; private transient PrintStream logger; - DecoratedTaskListener(TaskListener delegate, List decorators) { + DecoratedTaskListener(@NonNull TaskListener delegate, @NonNull List decorators) { this.delegate = delegate; assert !decorators.isEmpty(); assert !decorators.contains(null); this.decorators = decorators; } + @NonNull @Override public PrintStream getLogger() { if (logger == null) { OutputStream base = delegate.getLogger(); @@ -266,15 +282,16 @@ static BuildListener of(BuildListener mainDelegate, TaskListener closeDelegate) private static final long serialVersionUID = 1; - private final @Nonnull TaskListener mainDelegate; - private final @Nonnull TaskListener closeDelegate; + private final @NonNull TaskListener mainDelegate; + private final @NonNull TaskListener closeDelegate; - private CloseableTaskListener(TaskListener mainDelegate, TaskListener closeDelegate) { + private CloseableTaskListener(@NonNull TaskListener mainDelegate, @NonNull TaskListener closeDelegate) { this.mainDelegate = mainDelegate; this.closeDelegate = closeDelegate; assert closeDelegate instanceof AutoCloseable; } + @NonNull @Override public PrintStream getLogger() { return mainDelegate.getLogger(); } diff --git a/src/main/java/org/jenkinsci/plugins/workflow/pickles/PickleFactory.java b/src/main/java/org/jenkinsci/plugins/workflow/pickles/PickleFactory.java index fd993c70..f64d6a36 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/pickles/PickleFactory.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/pickles/PickleFactory.java @@ -27,15 +27,15 @@ import hudson.ExtensionList; import hudson.ExtensionPoint; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * Provides a way of converting transient objects into {@link Pickle}s. */ public abstract class PickleFactory implements ExtensionPoint { - public abstract @CheckForNull Pickle writeReplace(@Nonnull Object object); + public abstract @CheckForNull Pickle writeReplace(@NonNull Object object); public static ExtensionList all() { return ExtensionList.lookup(PickleFactory.class); diff --git a/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumn.java b/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumn.java index bdb794e4..bffc6e9f 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumn.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumn.java @@ -10,13 +10,14 @@ * Extension point for adding a column to a table rendering of {@link FlowNode}s. * *

      - * This object must have the column.groovy. This view + * This object must have the column.groovy. This view * is called for each cell of this column. The {@link FlowNode} object * is passed in the "node" variable. The view should render * a {@code } tag. + *

      * *

      - * This object may have an additional columnHeader.groovy. The default column header + * This object may have an additional columnHeader.groovy. The default column header * will render {@link #getColumnCaption()}. * * @author Kohsuke Kawaguchi diff --git a/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumnDescriptor.java b/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumnDescriptor.java index 814f4acb..1675680f 100644 --- a/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumnDescriptor.java +++ b/src/main/java/org/jenkinsci/plugins/workflow/visualization/table/FlowNodeViewColumnDescriptor.java @@ -5,7 +5,7 @@ import hudson.model.Descriptor; import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; +import edu.umd.cs.findbugs.annotations.CheckForNull; import java.util.ArrayList; import java.util.List; diff --git a/src/main/resources/lib/flow/nodeCaption.jelly b/src/main/resources/lib/flow/nodeCaption.jelly index dbf40752..277030ae 100644 --- a/src/main/resources/lib/flow/nodeCaption.jelly +++ b/src/main/resources/lib/flow/nodeCaption.jelly @@ -23,7 +23,7 @@ THE SOFTWARE. --> - + Generate a caption for the main panel that renders the status of the flow node. @@ -31,8 +31,10 @@ THE SOFTWARE.

      - ${node.iconColor.description} - + + +

      diff --git a/src/main/resources/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel/global.jelly b/src/main/resources/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel/global.jelly index 2bc1d122..49be137a 100644 --- a/src/main/resources/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel/global.jelly +++ b/src/main/resources/org/jenkinsci/plugins/workflow/flow/GlobalDefaultFlowDurabilityLevel/global.jelly @@ -1,17 +1,8 @@ - - - + + + -

      \ No newline at end of file diff --git a/src/main/resources/org/jenkinsci/plugins/workflow/graph/FlowNode/sidepanel.jelly b/src/main/resources/org/jenkinsci/plugins/workflow/graph/FlowNode/sidepanel.jelly index d3b5d626..2d5fd1ec 100644 --- a/src/main/resources/org/jenkinsci/plugins/workflow/graph/FlowNode/sidepanel.jelly +++ b/src/main/resources/org/jenkinsci/plugins/workflow/graph/FlowNode/sidepanel.jelly @@ -26,8 +26,8 @@ - - + + diff --git a/src/test/java/org/jenkinsci/plugins/workflow/ArtifactManagerTest.java b/src/test/java/org/jenkinsci/plugins/workflow/ArtifactManagerTest.java index ffd4d1a9..4ca402f1 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/ArtifactManagerTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/ArtifactManagerTest.java @@ -24,6 +24,17 @@ package org.jenkinsci.plugins.workflow; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.endsWith; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.CredentialsScope; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; @@ -47,11 +58,13 @@ import java.io.IOException; import java.io.InputStream; import java.net.URL; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collections; import java.util.logging.Level; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import jenkins.model.ArtifactManager; import jenkins.model.ArtifactManagerConfiguration; import jenkins.model.ArtifactManagerFactory; @@ -60,12 +73,10 @@ import jenkins.security.MasterToSlaveCallable; import jenkins.util.VirtualFile; import org.apache.commons.io.IOUtils; -import static org.hamcrest.Matchers.*; import org.jenkinsci.plugins.workflow.flow.StashManager; import org.jenkinsci.test.acceptance.docker.Docker; import org.jenkinsci.test.acceptance.docker.DockerImage; import org.jenkinsci.test.acceptance.docker.fixtures.JavaContainer; -import static org.junit.Assert.*; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; @@ -103,7 +114,7 @@ public class ArtifactManagerTest { /** * Creates an agent, in a Docker container when possible, calls {@link #setUpWorkspace}, then runs some tests. 
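Worth noting for downstream authors: the static artifactArchive, artifactArchiveAndDelete, artifactStash, and artifactStashAndDelete helpers in this class act as a reusable compatibility suite for ArtifactManagerFactory implementations, assuming the test-jar of this plugin is on the test classpath. A hypothetical consumer test might look roughly like the sketch below; the class name and the null factory placeholder are invented, and passing null for the Docker image is permitted by the @CheckForNull parameter when no container agent is wanted.

import jenkins.model.ArtifactManagerFactory;
import org.jenkinsci.plugins.workflow.ArtifactManagerTest;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.JenkinsRule;

public class MyArtifactManagerCompatibilityTest {

    @Rule public JenkinsRule r = new JenkinsRule();

    // Placeholder: substitute the ArtifactManagerFactory implementation under test.
    // Leaving it null simply exercises the default artifact manager.
    private final ArtifactManagerFactory factory = null;

    @Test public void archiveAndDelete() throws Exception {
        // weirdCharacters=true also covers file names with unusual characters; image=null means no Docker image is supplied.
        ArtifactManagerTest.artifactArchiveAndDelete(r, factory, true, null);
    }

    @Test public void stashAndDelete() throws Exception {
        ArtifactManagerTest.artifactStashAndDelete(r, factory, true, null);
    }
}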
*/ - private static void wrapInContainer(@Nonnull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, + private static void wrapInContainer(@NonNull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, TestFunction f) throws Exception { if (factory != null) { ArtifactManagerConfiguration.get().getArtifactManagerFactories().add(factory); @@ -142,7 +153,7 @@ private static void wrapInContainer(@Nonnull JenkinsRule r, @CheckForNull Artifa * @param weirdCharacters as in {@link #artifactArchiveAndDelete} * @param image as in {@link #artifactArchiveAndDelete} */ - public static void artifactArchive(@Nonnull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { + public static void artifactArchive(@NonNull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { wrapInContainer(r, factory, weirdCharacters, (agent, p, b, ws) -> { VirtualFile root = b.getArtifactManager().root(); new Verify(agent, root, weirdCharacters).run(); @@ -157,7 +168,7 @@ public static void artifactArchive(@Nonnull JenkinsRule r, @CheckForNull Artifac * @param weirdCharacters check behavior of files with Unicode and various unusual characters in the name * @param image use {@link #prepareImage} in a {@link BeforeClass} block */ - public static void artifactArchiveAndDelete(@Nonnull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { + public static void artifactArchiveAndDelete(@NonNull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { wrapInContainer(r, factory, weirdCharacters, (agent, p, b, ws) -> { VirtualFile root = b.getArtifactManager().root(); new Verify(agent, root, weirdCharacters).run(); @@ -173,7 +184,7 @@ public static void artifactArchiveAndDelete(@Nonnull JenkinsRule r, @CheckForNul * @param weirdCharacters as in {@link #artifactArchiveAndDelete} * @param image as in {@link #artifactArchiveAndDelete} */ - public static void artifactStash(@Nonnull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { + public static void artifactStash(@NonNull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { wrapInContainer(r, factory, weirdCharacters, new StashFunction(r, weirdCharacters, (p, b, ws, launcher, env, listener) -> { // should not have deleted @@ -187,7 +198,7 @@ public static void artifactStash(@Nonnull JenkinsRule r, @CheckForNull ArtifactM * @param weirdCharacters as in {@link #artifactArchiveAndDelete} * @param image as in {@link #artifactArchiveAndDelete} */ - public static void artifactStashAndDelete(@Nonnull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { + public static void artifactStashAndDelete(@NonNull JenkinsRule r, @CheckForNull ArtifactManagerFactory factory, boolean weirdCharacters, @CheckForNull DockerImage image) throws Exception { wrapInContainer(r, factory, weirdCharacters, new StashFunction(r, weirdCharacters, (p, b, ws, launcher, env, listener) -> { try { @@ -248,7 +259,7 @@ private static class StashFunction implements TestFunction { private final boolean 
weirdCharacters; private final TestStashFunction f; - StashFunction(@Nonnull JenkinsRule r, boolean weirdCharacters, TestStashFunction f) { + StashFunction(@NonNull JenkinsRule r, boolean weirdCharacters, TestStashFunction f) { this.r = r; this.weirdCharacters = weirdCharacters; this.f = f; @@ -392,7 +403,7 @@ private void assertFile(VirtualFile f, String contents) throws Exception { assertEquals(contents.length(), f.length()); assertThat(f.lastModified(), not(is(0))); try (InputStream is = f.open()) { - assertEquals(contents, IOUtils.toString(is)); + assertEquals(contents, IOUtils.toString(is, StandardCharsets.UTF_8)); } URL url = f.toExternalURL(); if (url != null) { @@ -407,7 +418,7 @@ private static final class RemoteOpenURL extends MasterToSlaveCallable new NoOpenVF(vf, baseURL)).toArray(VirtualFile[]::new); } - @Override public Collection list(String includes, String excludes, boolean useDefaultExcludes) throws IOException { + @NonNull + @Override public Collection list(@NonNull String includes, String excludes, boolean useDefaultExcludes) throws IOException { return delegate.list(includes, excludes, useDefaultExcludes); } - @Override public VirtualFile child(String string) { + @NonNull + @Override public VirtualFile child(@NonNull String string) { return new NoOpenVF(delegate.child(string), baseURL); } diff --git a/src/test/java/org/jenkinsci/plugins/workflow/actions/ErrorActionTest.java b/src/test/java/org/jenkinsci/plugins/workflow/actions/ErrorActionTest.java index 1f53a0e2..2835f095 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/actions/ErrorActionTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/actions/ErrorActionTest.java @@ -24,8 +24,8 @@ package org.jenkinsci.plugins.workflow.actions; +import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThat; import java.util.ArrayList; import java.util.List; diff --git a/src/test/java/org/jenkinsci/plugins/workflow/flow/DurabilityBasicsTest.java b/src/test/java/org/jenkinsci/plugins/workflow/flow/DurabilityBasicsTest.java index 89571c0e..97ce4bdb 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/flow/DurabilityBasicsTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/flow/DurabilityBasicsTest.java @@ -1,25 +1,22 @@ package org.jenkinsci.plugins.workflow.flow; -import hudson.ExtensionList; import hudson.Functions; import hudson.model.Descriptor; import hudson.model.User; import hudson.security.ACL; import hudson.security.ACLContext; -import hudson.security.Permission; -import hudson.util.ReflectionUtils; import jenkins.model.Jenkins; import org.junit.Assert; -import org.junit.Assume; import org.junit.Rule; import org.junit.Test; import org.jvnet.hudson.test.MockAuthorizationStrategy; -import org.jvnet.hudson.test.RestartableJenkinsRule; +import org.jvnet.hudson.test.JenkinsSessionRule; -import java.lang.reflect.InvocationTargetException; import java.util.Collection; import java.util.Optional; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertTrue; /** @@ -28,64 +25,57 @@ public class DurabilityBasicsTest { @Rule - public RestartableJenkinsRule r = new RestartableJenkinsRule(); + public JenkinsSessionRule sessions = new JenkinsSessionRule(); @Test - public void configRoundTrip() { - r.then(r -> { - GlobalDefaultFlowDurabilityLevel.DescriptorImpl level = 
r.jenkins.getExtensionList(GlobalDefaultFlowDurabilityLevel.DescriptorImpl.class).get(0); + public void configRoundTrip() throws Throwable { + sessions.then(j -> { + GlobalDefaultFlowDurabilityLevel.DescriptorImpl level = j.jenkins.getExtensionList(GlobalDefaultFlowDurabilityLevel.DescriptorImpl.class).get(0); level.setDurabilityHint(FlowDurabilityHint.PERFORMANCE_OPTIMIZED); - r.configRoundtrip(); + j.configRoundtrip(); Assert.assertEquals(FlowDurabilityHint.PERFORMANCE_OPTIMIZED, level.getDurabilityHint()); level.setDurabilityHint(null); - r.configRoundtrip(); + j.configRoundtrip(); Assert.assertNull(level.getDurabilityHint()); // Customize again so we can check for persistence level.setDurabilityHint(FlowDurabilityHint.PERFORMANCE_OPTIMIZED); Assert.assertEquals(FlowDurabilityHint.PERFORMANCE_OPTIMIZED, level.getDurabilityHint()); }); - r.then(r -> { - GlobalDefaultFlowDurabilityLevel.DescriptorImpl level = r.jenkins.getExtensionList(GlobalDefaultFlowDurabilityLevel.DescriptorImpl.class).get(0); + sessions.then(j -> { + GlobalDefaultFlowDurabilityLevel.DescriptorImpl level = j.jenkins.getExtensionList(GlobalDefaultFlowDurabilityLevel.DescriptorImpl.class).get(0); Assert.assertEquals(FlowDurabilityHint.PERFORMANCE_OPTIMIZED, level.getDurabilityHint()); }); } @Test - public void defaultHandling() { - r.then(r -> { + public void defaultHandling() throws Throwable { + sessions.then(j -> { Assert.assertEquals(GlobalDefaultFlowDurabilityLevel.SUGGESTED_DURABILITY_HINT, GlobalDefaultFlowDurabilityLevel.getDefaultDurabilityHint()); - GlobalDefaultFlowDurabilityLevel.DescriptorImpl level = r.jenkins.getExtensionList(GlobalDefaultFlowDurabilityLevel.DescriptorImpl.class).get(0); + GlobalDefaultFlowDurabilityLevel.DescriptorImpl level = j.jenkins.getExtensionList(GlobalDefaultFlowDurabilityLevel.DescriptorImpl.class).get(0); level.setDurabilityHint(FlowDurabilityHint.PERFORMANCE_OPTIMIZED); Assert.assertEquals(FlowDurabilityHint.PERFORMANCE_OPTIMIZED, GlobalDefaultFlowDurabilityLevel.getDefaultDurabilityHint()); }); } @Test - public void managePermissionShouldAccessGlobalConfig() { - r.then(r -> { - Permission jenkinsManage; - try { - jenkinsManage = getJenkinsManage(); - } catch (Exception e) { - Assume.assumeTrue("Jenkins baseline is too old for this test (requires Jenkins.MANAGE)", false); - return; - } + public void managePermissionShouldAccessGlobalConfig() throws Throwable { + sessions.then(j -> { final String USER = "user"; final String MANAGER = "manager"; - r.jenkins.setSecurityRealm(r.createDummySecurityRealm()); - r.jenkins.setAuthorizationStrategy(new MockAuthorizationStrategy() + j.jenkins.setSecurityRealm(j.createDummySecurityRealm()); + j.jenkins.setAuthorizationStrategy(new MockAuthorizationStrategy() // Read access .grant(Jenkins.READ).everywhere().to(USER) // Read and Manage .grant(Jenkins.READ).everywhere().to(MANAGER) - .grant(jenkinsManage).everywhere().to(MANAGER) + .grant(Jenkins.MANAGE).everywhere().to(MANAGER) ); try (ACLContext c = ACL.as(User.getById(USER, true))) { Collection descriptors = Functions.getSortedDescriptorsForGlobalConfigUnclassified(); - assertTrue("Global configuration should not be accessible to READ users", descriptors.size() == 0); + assertThat("Global configuration should not be accessible to READ users", descriptors, empty()); } try (ACLContext c = ACL.as(User.getById(MANAGER, true))) { Collection descriptors = Functions.getSortedDescriptorsForGlobalConfigUnclassified(); @@ -94,10 +84,4 @@ public void managePermissionShouldAccessGlobalConfig() { } 
}); } - - // TODO: remove when Jenkins core baseline is 2.222+ - private Permission getJenkinsManage() throws NoSuchMethodException, IllegalAccessException, InvocationTargetException { - // Jenkins.MANAGE is available starting from Jenkins 2.222 (https://jenkins.io/changelog/#v2.222). See JEP-223 for more info - return (Permission) ReflectionUtils.getPublicProperty(Jenkins.get(), "MANAGE"); - } } diff --git a/src/test/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListTest.java b/src/test/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListTest.java index c1a1d238..8cf59f30 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/flow/FlowExecutionListTest.java @@ -24,39 +24,58 @@ package org.jenkinsci.plugins.workflow.flow; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasItem; +import static org.junit.Assert.assertNotNull; + +import hudson.AbortException; import hudson.model.ParametersAction; import hudson.model.ParametersDefinitionProperty; +import hudson.model.Result; import hudson.model.StringParameterDefinition; import hudson.model.StringParameterValue; +import hudson.model.TaskListener; import hudson.model.queue.QueueTaskFuture; +import java.io.Serializable; +import java.time.Duration; +import java.time.Instant; +import java.util.Collections; +import java.util.Set; +import java.util.function.Supplier; import java.util.logging.Level; +import org.hamcrest.Matcher; import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition; import org.jenkinsci.plugins.workflow.job.WorkflowJob; import org.jenkinsci.plugins.workflow.job.WorkflowRun; +import org.jenkinsci.plugins.workflow.steps.Step; +import org.jenkinsci.plugins.workflow.steps.StepContext; +import org.jenkinsci.plugins.workflow.steps.StepDescriptor; +import org.jenkinsci.plugins.workflow.steps.StepExecution; +import org.jenkinsci.plugins.workflow.test.steps.SemaphoreStep; import org.junit.ClassRule; import org.junit.Test; -import static org.junit.Assert.*; import org.junit.Rule; -import org.junit.runners.model.Statement; import org.jvnet.hudson.test.BuildWatcher; import org.jvnet.hudson.test.Issue; import org.jvnet.hudson.test.LoggerRule; -import org.jvnet.hudson.test.RestartableJenkinsRule; +import org.jvnet.hudson.test.JenkinsSessionRule; +import org.jvnet.hudson.test.TestExtension; +import org.kohsuke.stapler.DataBoundConstructor; public class FlowExecutionListTest { @ClassRule public static BuildWatcher buildWatcher = new BuildWatcher(); - @Rule public RestartableJenkinsRule rr = new RestartableJenkinsRule(); + @Rule public JenkinsSessionRule sessions = new JenkinsSessionRule(); @Rule public LoggerRule logging = new LoggerRule().record(FlowExecutionList.class, Level.FINE); @Issue("JENKINS-40771") - @Test public void simultaneousRegister() { - rr.addStep(new Statement() { - @Override public void evaluate() throws Throwable { - WorkflowJob p = rr.j.createProject(WorkflowJob.class, "p"); + @Test public void simultaneousRegister() throws Throwable { + sessions.then(j -> { + WorkflowJob p = j.createProject(WorkflowJob.class, "p"); { // make sure there is an initial FlowExecutionList.xml p.setDefinition(new CpsFlowDefinition("", true)); - rr.j.buildAndAssertSuccess(p); + j.buildAndAssertSuccess(p); } p.setDefinition(new CpsFlowDefinition("echo params.key; sleep 5", true)); p.addProperty(new ParametersDefinitionProperty(new 
StringParameterDefinition("key", null))); @@ -68,19 +87,125 @@ public class FlowExecutionListTest { assertNotNull(b2); WorkflowRun b3 = p.getBuildByNumber(3); assertNotNull(b3); - rr.j.waitForMessage("Sleeping for ", b2); - rr.j.waitForMessage("Sleeping for ", b3); - } + j.waitForMessage("Sleeping for ", b2); + j.waitForMessage("Sleeping for ", b3); }); - rr.addStep(new Statement() { - @Override public void evaluate() throws Throwable { - WorkflowJob p = rr.j.jenkins.getItemByFullName("p", WorkflowJob.class); + sessions.then(j -> { + WorkflowJob p = j.jenkins.getItemByFullName("p", WorkflowJob.class); WorkflowRun b2 = p.getBuildByNumber(2); WorkflowRun b3 = p.getBuildByNumber(3); - rr.j.assertBuildStatusSuccess(rr.j.waitForCompletion(b2)); - rr.j.assertBuildStatusSuccess(rr.j.waitForCompletion(b3)); - } + j.assertBuildStatusSuccess(j.waitForCompletion(b2)); + j.assertBuildStatusSuccess(j.waitForCompletion(b3)); + }); + } + + @Test public void forceLoadRunningExecutionsAfterRestart() throws Throwable { + logging.capture(50); + sessions.then(r -> { + WorkflowJob p = r.jenkins.createProject(WorkflowJob.class, "p"); + p.setDefinition(new CpsFlowDefinition("semaphore('wait')", true)); + WorkflowRun b = p.scheduleBuild2(0).waitForStart(); + SemaphoreStep.waitForStart("wait/1", b); + }); + sessions.then(r -> { + /* + Make sure that the build gets loaded automatically by FlowExecutionList$ItemListenerImpl before we load it explicitly. + Expected call stack for resuming a Pipeline and its steps: + at org.jenkinsci.plugins.workflow.flow.FlowExecutionList$ResumeStepExecutionListener$1.onSuccess(FlowExecutionList.java:250) + at org.jenkinsci.plugins.workflow.flow.FlowExecutionList$ResumeStepExecutionListener$1.onSuccess(FlowExecutionList.java:247) + at com.google.common.util.concurrent.Futures$6.run(Futures.java:975) + at org.jenkinsci.plugins.workflow.flow.DirectExecutor.execute(DirectExecutor.java:33) + ... Guava Futures API internals ... + at com.google.common.util.concurrent.Futures.addCallback(Futures.java:985) + at org.jenkinsci.plugins.workflow.flow.FlowExecutionList$ResumeStepExecutionListener.onResumed(FlowExecutionList.java:247) + at org.jenkinsci.plugins.workflow.flow.FlowExecutionListener.fireResumed(FlowExecutionListener.java:84) + at org.jenkinsci.plugins.workflow.job.WorkflowRun.onLoad(WorkflowRun.java:528) + at hudson.model.RunMap.retrieve(RunMap.java:225) + ... RunMap internals ...
+ at hudson.model.RunMap.getById(RunMap.java:205) + at org.jenkinsci.plugins.workflow.job.WorkflowRun$Owner.run(WorkflowRun.java:937) + at org.jenkinsci.plugins.workflow.job.WorkflowRun$Owner.get(WorkflowRun.java:948) + at org.jenkinsci.plugins.workflow.flow.FlowExecutionList$1.computeNext(FlowExecutionList.java:65) + at org.jenkinsci.plugins.workflow.flow.FlowExecutionList$1.computeNext(FlowExecutionList.java:57) + at com.google.common.collect.AbstractIterator.tryToComputeNext(AbstractIterator.java:143) + at com.google.common.collect.AbstractIterator.hasNext(AbstractIterator.java:138) + at org.jenkinsci.plugins.workflow.flow.FlowExecutionList$ItemListenerImpl.onLoaded(FlowExecutionList.java:175) + at jenkins.model.Jenkins.(Jenkins.java:1019) + */ + waitFor(logging::getMessages, hasItem(containsString("Will resume [org.jenkinsci.plugins.workflow.test.steps.SemaphoreStep"))); + WorkflowJob p = r.jenkins.getItemByFullName("p", WorkflowJob.class); + SemaphoreStep.success("wait/1", null); + WorkflowRun b = p.getBuildByNumber(1); + r.waitForCompletion(b); + r.assertBuildStatus(Result.SUCCESS, b); + }); + } + + @Issue("JENKINS-67164") + @Test public void resumeStepExecutions() throws Throwable { + sessions.then(r -> { + WorkflowJob p = r.jenkins.createProject(WorkflowJob.class, "p"); + p.setDefinition(new CpsFlowDefinition("noResume()", true)); + WorkflowRun b = p.scheduleBuild2(0).waitForStart(); + r.waitForMessage("Starting non-resumable step", b); + // TODO: Unclear how this might happen in practice. + FlowExecutionList.get().unregister(b.asFlowExecutionOwner()); + }); + sessions.then(r -> { + WorkflowJob p = r.jenkins.getItemByFullName("p", WorkflowJob.class); + WorkflowRun b = p.getBuildByNumber(1); + r.waitForCompletion(b); + r.assertBuildStatus(Result.FAILURE, b); + r.assertLogContains("Unable to resume NonResumableStep", b); }); } + public static class NonResumableStep extends Step implements Serializable { + public static final long serialVersionUID = 1L; + @DataBoundConstructor + public NonResumableStep() { } + @Override + public StepExecution start(StepContext sc) { + return new ExecutionImpl(sc); + } + + private static class ExecutionImpl extends StepExecution implements Serializable { + public static final long serialVersionUID = 1L; + private ExecutionImpl(StepContext sc) { + super(sc); + } + @Override + public boolean start() throws Exception { + getContext().get(TaskListener.class).getLogger().println("Starting non-resumable step"); + return false; + } + @Override + public void onResume() { + getContext().onFailure(new AbortException("Unable to resume NonResumableStep")); + } + } + + @TestExtension public static class DescriptorImpl extends StepDescriptor { + @Override + public Set> getRequiredContext() { + return Collections.singleton(TaskListener.class); + } + @Override + public String getFunctionName() { + return "noResume"; + } + } + } + + /** + * Wait up to 5 seconds for the given supplier to return a matching value. 
+ */ + private static void waitFor(Supplier valueSupplier, Matcher matcher) throws InterruptedException { + Instant end = Instant.now().plus(Duration.ofSeconds(5)); + while (!matcher.matches(valueSupplier.get()) && Instant.now().isBefore(end)) { + Thread.sleep(100L); + } + assertThat("Matcher should have matched after 5s", valueSupplier.get(), matcher); + } + } diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graph/FlowNodeTest.java b/src/test/java/org/jenkinsci/plugins/workflow/graph/FlowNodeTest.java index 830d5d91..f713d901 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/graph/FlowNodeTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graph/FlowNodeTest.java @@ -54,9 +54,13 @@ import org.junit.Test; import org.kohsuke.stapler.DataBoundConstructor; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; -import static org.junit.Assert.*; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import org.junit.Rule; import org.junit.runners.model.Statement; @@ -589,7 +593,7 @@ public static class DoubleWarningStep extends Step { public DoubleWarningStep() {} @Override - public StepExecution start(StepContext context) throws Exception { + public StepExecution start(StepContext context) { return new StepExecution(context) { @Override public boolean start() throws Exception { diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScannerTest.java b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScannerTest.java index c0acd64d..2732725e 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScannerTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowScannerTest.java @@ -50,9 +50,6 @@ import java.util.List; import java.util.NoSuchElementException; -// Slightly dirty but it removes a ton of FlowTestUtils.* class qualifiers -import static org.jenkinsci.plugins.workflow.graphanalysis.FlowTestUtils.*; - /** * Tests for all the core parts of graph analysis except the ForkScanner, internals which is complex enough to merit its own tests * @author Sam Van Oort @@ -90,10 +87,10 @@ public void testAbstractScanner() throws Exception { AbstractFlowScanner linear = new LinearScanner(); // ## Bunch of tests for convertToFastCheckable ## - Assert.assertEquals(Collections.EMPTY_SET, linear.convertToFastCheckable(null)); - Assert.assertEquals(Collections.EMPTY_SET, linear.convertToFastCheckable(new ArrayList<>())); + Assert.assertEquals(Collections.emptySet(), linear.convertToFastCheckable(null)); + Assert.assertEquals(Collections.emptySet(), linear.convertToFastCheckable(new ArrayList<>())); - Collection coll = linear.convertToFastCheckable(Arrays.asList(intermediateNode)); + Collection coll = linear.convertToFastCheckable(Collections.singletonList(intermediateNode)); Assert.assertTrue("Singleton set used for one element", coll instanceof AbstractSet); Assert.assertEquals(1, coll.size()); @@ -128,13 +125,13 @@ public void testAbstractScanner() throws Exception { Collection nullColl = null; Assert.assertTrue(linear.setup(heads, null)); - Assert.assertTrue(linear.setup(heads, Collections.EMPTY_SET)); + Assert.assertTrue(linear.setup(heads, Collections.emptySet())); Assert.assertFalse(linear.setup(nullColl, heads)); Assert.assertFalse(linear.setup(nullColl, null)); 
Assert.assertFalse(linear.setup(heads, heads)); Assert.assertTrue(linear.setup(heads)); Assert.assertFalse(linear.setup(nullColl)); - Assert.assertFalse(linear.setup(Collections.EMPTY_SET)); + Assert.assertFalse(linear.setup(Collections.emptySet())); Assert.assertTrue(linear.setup(lastNode)); Assert.assertTrue(linear.setup(lastNode, nullColl)); Assert.assertFalse(linear.setup(nullNode)); @@ -148,46 +145,46 @@ public void testAbstractScanner() throws Exception { FlowNode firstEchoNode = exec.getNode("5"); FlowExecution nullExecution = null; - Assert.assertEquals(firstEchoNode, linear.findFirstMatch(heads, Collections.EMPTY_LIST, MATCH_ECHO_STEP)); - Assert.assertEquals(firstEchoNode, linear.findFirstMatch(heads, MATCH_ECHO_STEP)); - Assert.assertEquals(firstEchoNode, linear.findFirstMatch(lastNode, MATCH_ECHO_STEP)); - Assert.assertEquals(firstEchoNode, linear.findFirstMatch(exec, MATCH_ECHO_STEP)); - Assert.assertNull(linear.findFirstMatch(nullColl, MATCH_ECHO_STEP)); - Assert.assertNull(linear.findFirstMatch(Collections.EMPTY_SET, MATCH_ECHO_STEP)); - Assert.assertNull(linear.findFirstMatch(nullNode, MATCH_ECHO_STEP)); - Assert.assertNull(linear.findFirstMatch(nullExecution, MATCH_ECHO_STEP)); + Assert.assertEquals(firstEchoNode, linear.findFirstMatch(heads, Collections.emptySet(), FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertEquals(firstEchoNode, linear.findFirstMatch(heads, FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertEquals(firstEchoNode, linear.findFirstMatch(lastNode, FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertEquals(firstEchoNode, linear.findFirstMatch(exec, FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertNull(linear.findFirstMatch(nullColl, FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertNull(linear.findFirstMatch(Collections.emptySet(), FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertNull(linear.findFirstMatch(nullNode, FlowTestUtils.MATCH_ECHO_STEP)); + Assert.assertNull(linear.findFirstMatch(nullExecution, FlowTestUtils.MATCH_ECHO_STEP)); // Filtered nodes - assertNodeOrder("Filtered echo nodes", linear.filteredNodes(heads, MATCH_ECHO_STEP), 5, 4); - assertNodeOrder("Filtered echo nodes", linear.filteredNodes(heads, Collections.singleton(intermediateNode), MATCH_ECHO_STEP), 5); - Assert.assertEquals(0, linear.filteredNodes(heads, null, (Predicate) Predicates.alwaysFalse()).size()); - Assert.assertEquals(0, linear.filteredNodes(nullNode, MATCH_ECHO_STEP).size()); - Assert.assertEquals(0, linear.filteredNodes(Collections.EMPTY_SET, MATCH_ECHO_STEP).size()); + FlowTestUtils.assertNodeOrder("Filtered echo nodes", linear.filteredNodes(heads, FlowTestUtils.MATCH_ECHO_STEP), 5, 4); + FlowTestUtils.assertNodeOrder("Filtered echo nodes", linear.filteredNodes(heads, Collections.singleton(intermediateNode), FlowTestUtils.MATCH_ECHO_STEP), 5); + Assert.assertEquals(0, linear.filteredNodes(heads, null, Predicates.alwaysFalse()).size()); + Assert.assertEquals(0, linear.filteredNodes(nullNode, FlowTestUtils.MATCH_ECHO_STEP).size()); + Assert.assertEquals(0, linear.filteredNodes(Collections.emptySet(), FlowTestUtils.MATCH_ECHO_STEP).size()); // Same filter using the filterator linear.setup(heads); ArrayList collected = new ArrayList<>(); - Filterator filt = linear.filter(MATCH_ECHO_STEP); + Filterator filt = linear.filter(FlowTestUtils.MATCH_ECHO_STEP); while (filt.hasNext()) { collected.add(filt.next()); } - assertNodeOrder("Filterator filtered echo nodes", collected, 5, 4); + FlowTestUtils.assertNodeOrder("Filterator filtered echo nodes", collected, 5, 4); // Visitor pattern tests 
FlowTestUtils.CollectingVisitor visitor = new FlowTestUtils.CollectingVisitor(); - linear.visitAll(Collections.EMPTY_SET, visitor); + linear.visitAll(Collections.emptySet(), visitor); Assert.assertEquals(0, visitor.getVisited().size()); visitor.reset(); linear.visitAll(heads, visitor); - assertNodeOrder("Visiting all nodes", visitor.getVisited(), 6, 5, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Visiting all nodes", visitor.getVisited(), 6, 5, 4, 3, 2); // And visiting with blacklist visitor.reset(); linear.visitAll(heads, Collections.singleton(intermediateNode), visitor); - assertNodeOrder("Visiting all nodes with blacklist", visitor.getVisited(), 6, 5); + FlowTestUtils.assertNodeOrder("Visiting all nodes with blacklist", visitor.getVisited(), 6, 5); // Tests for edge cases of the various basic APIs linear.myNext = null; @@ -238,12 +235,12 @@ public void testSimpleScan() throws Exception { for (AbstractFlowScanner scan : scans) { System.out.println("Iteration test with scanner: " + scan.getClass()); scan.setup(heads, null); - assertNodeOrder("Testing full scan for scanner " + scan.getClass(), scan, 6, 5, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Testing full scan for scanner " + scan.getClass(), scan, 6, 5, 4, 3, 2); Assert.assertFalse(scan.hasNext()); // Blacklist tests scan.setup(heads, Collections.singleton(exec.getNode("4"))); - assertNodeOrder("Testing full scan for scanner " + scan.getClass(), scan, 6, 5); + FlowTestUtils.assertNodeOrder("Testing full scan for scanner " + scan.getClass(), scan, 6, 5); FlowNode f = scan.findFirstMatch(heads, Collections.singleton(exec.getNode("6")), Predicates.alwaysTrue()); Assert.assertNull(f); } @@ -282,24 +279,24 @@ public void testBasicScanWithBlock() throws Exception { // Linear analysis LinearScanner linearScanner = new LinearScanner(); linearScanner.setup(heads); - assertNodeOrder("Linear scan with block", linearScanner, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Linear scan with block", linearScanner, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2); linearScanner.setup(exec.getNode("7")); - assertNodeOrder("Linear scan with block from middle ", linearScanner, 7, 6, 5, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Linear scan with block from middle ", linearScanner, 7, 6, 5, 4, 3, 2); LinearBlockHoppingScanner linearBlockHoppingScanner = new LinearBlockHoppingScanner(); // // Test block jump core FlowNode headCandidate = exec.getNode("8"); - Assert.assertEquals(exec.getNode("4"), linearBlockHoppingScanner.jumpBlockScan(headCandidate, Collections.EMPTY_SET)); + Assert.assertEquals(exec.getNode("4"), linearBlockHoppingScanner.jumpBlockScan(headCandidate, Collections.emptySet())); Assert.assertTrue("Setup should return true if we can iterate", linearBlockHoppingScanner.setup(headCandidate, null)); // Test the actual iteration linearBlockHoppingScanner.setup(heads); Assert.assertFalse(linearBlockHoppingScanner.hasNext()); linearBlockHoppingScanner.setup(exec.getNode("8")); - assertNodeOrder("Hopping over one block", linearBlockHoppingScanner, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Hopping over one block", linearBlockHoppingScanner, 4, 3, 2); linearBlockHoppingScanner.setup(exec.getNode("7")); - assertNodeOrder("Hopping over one block", linearBlockHoppingScanner, 7, 6, 5, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Hopping over one block", linearBlockHoppingScanner, 7, 6, 5, 4, 3, 2); // Test the black list in combination with hopping linearBlockHoppingScanner.setup(exec.getNode("8"), Collections.singleton(exec.getNode("5"))); @@ -349,9 
+346,9 @@ public void testParallelScan() throws Exception { AbstractFlowScanner scanner = new LinearScanner(); scanner.setup(heads); - assertNodeOrder("Linear", scanner, 15, 14, 13, 9, 8, 6, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Linear", scanner, 15, 14, 13, 9, 8, 6, 4, 3, 2); scanner.setup(heads, Collections.singleton(exec.getNode("9"))); - assertNodeOrder("Linear", scanner, 15, 14, 13, 12, 11, 10, 7, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Linear", scanner, 15, 14, 13, 12, 11, 10, 7, 4, 3, 2); // Depth first scanner and with blacklist @@ -359,33 +356,33 @@ public void testParallelScan() throws Exception { scanner.setup(heads); // Compatibility test for ordering - assertNodeOrder("FlowGraphWalker", new FlowGraphWalker(exec), 15, 14, 13, + FlowTestUtils.assertNodeOrder("FlowGraphWalker", new FlowGraphWalker(exec), 15, 14, 13, 9, 8, 6, // Branch 1 4, 3, 2, // Before parallel 12, 11, 10, 7); // Branch 2 - assertNodeOrder("Depth first", new FlowGraphWalker(exec), 15, 14, 13, + FlowTestUtils.assertNodeOrder("Depth first", new FlowGraphWalker(exec), 15, 14, 13, 9, 8, 6, // Branch 1 4, 3, 2, // Before parallel 12, 11, 10, 7); // Branch 2 scanner.setup(heads, Collections.singleton(exec.getNode("9"))); - assertNodeOrder("Linear", scanner, 15, 14, 13, 12, 11, 10, 7, 4, 3, 2); + FlowTestUtils.assertNodeOrder("Linear", scanner, 15, 14, 13, 12, 11, 10, 7, 4, 3, 2); scanner.setup(Arrays.asList(exec.getNode("9"), exec.getNode("12"))); - assertNodeOrder("Depth-first scanner from inside parallels", scanner, 9, 8, 6, 4, 3, 2, 12, 11, 10, 7); + FlowTestUtils.assertNodeOrder("Depth-first scanner from inside parallels", scanner, 9, 8, 6, 4, 3, 2, 12, 11, 10, 7); // We're going to test the ForkScanner in more depth since this is its natural use scanner = new ForkScanner(); scanner.setup(heads); - assertNodeOrder("ForkedScanner", scanner, 15, 14, 13, + FlowTestUtils.assertNodeOrder("ForkedScanner", scanner, 15, 14, 13, 12, 11, 10, 7,// One parallel 9, 8, 6, // other parallel 4, 3, 2); // end bit scanner.setup(heads, Collections.singleton(exec.getNode("9"))); - assertNodeOrder("ForkedScanner", scanner, 15, 14, 13, 12, 11, 10, 7, 4, 3, 2); + FlowTestUtils.assertNodeOrder("ForkedScanner", scanner, 15, 14, 13, 12, 11, 10, 7, 4, 3, 2); // Test forkscanner midflow scanner.setup(exec.getNode("14")); - assertNodeOrder("ForkedScanner", scanner, 14, 13, + FlowTestUtils.assertNodeOrder("ForkedScanner", scanner, 14, 13, 12, 11, 10, 7, // Last parallel 9, 8, 6, // First parallel 4, 3, 2); // end bit @@ -393,19 +390,19 @@ public void testParallelScan() throws Exception { // Test forkscanner inside a parallel List startingPoints = Arrays.asList(exec.getNode("9"), exec.getNode("12")); scanner.setup(startingPoints); - assertNodeOrder("ForkedScanner", scanner, 9, 8, 6, 12, 11, 10, 7, 4, 3, 2); + FlowTestUtils.assertNodeOrder("ForkedScanner", scanner, 9, 8, 6, 12, 11, 10, 7, 4, 3, 2); startingPoints = Arrays.asList(exec.getNode("9"), exec.getNode("11")); scanner.setup(startingPoints); - assertNodeOrder("ForkedScanner", scanner, 9, 8, 6, 11, 10, 7, 4, 3, 2); + FlowTestUtils.assertNodeOrder("ForkedScanner", scanner, 9, 8, 6, 11, 10, 7, 4, 3, 2); // Filtering at different points within branches List blackList = Arrays.asList(exec.getNode("6"), exec.getNode("7")); - Assert.assertEquals(4, scanner.filteredNodes(heads, blackList, MATCH_ECHO_STEP).size()); - Assert.assertEquals(4, scanner.filteredNodes(heads, Collections.singletonList(exec.getNode("4")), MATCH_ECHO_STEP).size()); + Assert.assertEquals(4, 
scanner.filteredNodes(heads, blackList, FlowTestUtils.MATCH_ECHO_STEP).size()); + Assert.assertEquals(4, scanner.filteredNodes(heads, Collections.singletonList(exec.getNode("4")), FlowTestUtils.MATCH_ECHO_STEP).size()); blackList = Arrays.asList(exec.getNode("6"), exec.getNode("10")); - Assert.assertEquals(3, scanner.filteredNodes(heads, blackList, MATCH_ECHO_STEP).size()); + Assert.assertEquals(3, scanner.filteredNodes(heads, blackList, FlowTestUtils.MATCH_ECHO_STEP).size()); } @Test @@ -465,7 +462,7 @@ public void testNestedParallelScan() throws Exception { // Basic test of DepthFirstScanner AbstractFlowScanner scanner = new DepthFirstScanner(); - Collection matches = scanner.filteredNodes(heads, null, MATCH_ECHO_STEP); + Collection matches = scanner.filteredNodes(heads, null, FlowTestUtils.MATCH_ECHO_STEP); Assert.assertEquals(7, matches.size()); scanner.setup(heads); @@ -474,11 +471,11 @@ public void testNestedParallelScan() throws Exception { // We're going to test the ForkScanner in more depth since this is its natural use scanner = new ForkScanner(); - matches = scanner.filteredNodes(heads, null, MATCH_ECHO_STEP); + matches = scanner.filteredNodes(heads, null, FlowTestUtils.MATCH_ECHO_STEP); Assert.assertEquals(7, matches.size()); heads = Arrays.asList(exec.getNode("20"), exec.getNode("17"), exec.getNode("9")); - matches = scanner.filteredNodes(heads, null, MATCH_ECHO_STEP); + matches = scanner.filteredNodes(heads, null, FlowTestUtils.MATCH_ECHO_STEP); Assert.assertEquals(6, matches.size()); // Commented out since temporarily failing } -} \ No newline at end of file +} diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowTestUtils.java b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowTestUtils.java index fc30d154..56ea42f3 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowTestUtils.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/FlowTestUtils.java @@ -30,7 +30,7 @@ import org.jenkinsci.plugins.workflow.steps.StepDescriptor; import org.junit.Assert; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -40,17 +40,14 @@ * @author Sam Van Oort */ public class FlowTestUtils { - public static Predicate predicateMatchStepDescriptor(@Nonnull final String descriptorId) { - return new Predicate() { - @Override - public boolean apply(FlowNode input) { - if (input instanceof StepAtomNode) { - StepAtomNode san = (StepAtomNode)input; - StepDescriptor sd = san.getDescriptor(); - return sd != null && descriptorId.equals(sd.getId()); - } - return false; + public static Predicate predicateMatchStepDescriptor(@NonNull final String descriptorId) { + return input -> { + if (input instanceof StepAtomNode) { + StepAtomNode san = (StepAtomNode)input; + StepDescriptor sd = san.getDescriptor(); + return sd != null && descriptorId.equals(sd.getId()); } + return false; }; } @@ -58,7 +55,7 @@ public static final class CollectingVisitor implements FlowNodeVisitor { ArrayList visited = new ArrayList<>(); @Override - public boolean visit(@Nonnull FlowNode f) { + public boolean visit(@NonNull FlowNode f) { visited.add(f); return true; } diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScannerTest.java b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScannerTest.java index 661319e7..86f1da4b 100644 --- 
a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScannerTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/ForkScannerTest.java @@ -49,6 +49,7 @@ import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; @@ -58,9 +59,6 @@ import java.util.function.Predicate; import java.util.stream.Collectors; -// Slightly dirty but it removes a ton of FlowTestUtils.* class qualifiers -import static org.jenkinsci.plugins.workflow.graphanalysis.FlowTestUtils.*; - /** * Tests for internals of ForkScanner */ @@ -73,7 +71,7 @@ public class ForkScannerTest { public static Predicate predicateForCallEntryType(final TestVisitor.CallType type) { return new Predicate() { - TestVisitor.CallType myType = type; + final TestVisitor.CallType myType = type; @Override public boolean test(TestVisitor.CallEntry input) { @@ -183,7 +181,7 @@ private void assertIncompleteParallelsHaveEventsForEnd(List heads, Tes // Verify we have at least one appropriate parallel end event, for the mandatory parallel List parallelEnds = test.filteredCallsByType(TestVisitor.CallType.PARALLEL_END).stream() - .map(CALL_TO_NODE_ID::apply) + .map(CALL_TO_NODE_ID) .collect(Collectors.toList()); boolean hasMatchingEnd = false; for (FlowNode f : heads) { @@ -196,7 +194,7 @@ private void assertIncompleteParallelsHaveEventsForEnd(List heads, Tes List branchEnds = test.filteredCallsByType(TestVisitor.CallType.PARALLEL_BRANCH_END).stream() - .map(CALL_TO_NODE_ID::apply) + .map(CALL_TO_NODE_ID) .collect(Collectors.toList()); // Verify each branch has a branch end event for (FlowNode f : heads) { @@ -207,7 +205,7 @@ private void assertIncompleteParallelsHaveEventsForEnd(List heads, Tes } /** Runs a fairly extensive suite of sanity tests of iteration and visitor use */ - private void sanityTestIterationAndVisiter(List heads) throws Exception { + private void sanityTestIterationAndVisiter(List heads) { ForkScanner scan = new ForkScanner(); TestVisitor test = new TestVisitor(); scan.setup(heads); @@ -281,7 +279,7 @@ public void testForkedScanner() throws Exception { Assert.assertNotNull(scanner.parallelBlockStartStack); Assert.assertEquals(0, scanner.parallelBlockStartStack.size()); Assert.assertEquals(exec.getNode("4"), scanner.currentParallelStartNode); - sanityTestIterationAndVisiter(Arrays.asList(exec.getNode("13"))); + sanityTestIterationAndVisiter(Collections.singletonList(exec.getNode("13"))); ForkScanner.ParallelBlockStart start = scanner.currentParallelStart; Assert.assertEquals(1, start.unvisited.size()); @@ -362,7 +360,7 @@ public void testFlowSegmentSplit() throws Exception { for (FlowNode f : mainBranch.visited) { nodeMap.put(f, mainBranch); } - assertNodeOrder("Visited nodes", mainBranch.visited, 9, 8, 6, 4, 3); + FlowTestUtils.assertNodeOrder("Visited nodes", mainBranch.visited, 9, 8, 6, 4, 3); // Branch 2 sideBranch.add(BRANCH2_END); @@ -372,16 +370,16 @@ public void testFlowSegmentSplit() throws Exception { for (FlowNode f : sideBranch.visited) { nodeMap.put(f, sideBranch); } - assertNodeOrder("Visited nodes", sideBranch.visited, 12, 11, 10, 7); + FlowTestUtils.assertNodeOrder("Visited nodes", sideBranch.visited, 12, 11, 10, 7); ForkScanner.Fork forked = mainBranch.split(nodeMap, (BlockStartNode)exec.getNode("4"), sideBranch); ForkScanner.FlowSegment splitSegment = (ForkScanner.FlowSegment)nodeMap.get(BRANCH1_END); // New branch Assert.assertNull(splitSegment.after); - 
assertNodeOrder("Branch 1 split after fork", splitSegment.visited, 9, 8, 6); + FlowTestUtils.assertNodeOrder("Branch 1 split after fork", splitSegment.visited, 9, 8, 6); // Just the single node before the fork Assert.assertEquals(forked, mainBranch.after); - assertNodeOrder("Head of flow, pre-fork", mainBranch.visited, 3); + FlowTestUtils.assertNodeOrder("Head of flow, pre-fork", mainBranch.visited, 3); // Fork point Assert.assertEquals(forked, nodeMap.get(START_PARALLEL)); @@ -390,7 +388,7 @@ public void testFlowSegmentSplit() throws Exception { // Branch 2 Assert.assertEquals(sideBranch, nodeMap.get(BRANCH2_END)); - assertNodeOrder("Branch 2", sideBranch.visited, 12, 11, 10, 7); + FlowTestUtils.assertNodeOrder("Branch 2", sideBranch.visited, 12, 11, 10, 7); // Test me where splitting right at a fork point, where we should have a fork with and main branch shoudl become following // Along with side branch (branch2) @@ -410,9 +408,9 @@ public void testFlowSegmentSplit() throws Exception { follows[0] = mainBranch; follows[1] = sideBranch; Assert.assertArrayEquals(follows, forked.following.toArray()); - assertNodeOrder("Branch1", mainBranch.visited, 6); + FlowTestUtils.assertNodeOrder("Branch1", mainBranch.visited, 6); Assert.assertNull(mainBranch.after); - assertNodeOrder("Branch2", sideBranch.visited, 7); + FlowTestUtils.assertNodeOrder("Branch2", sideBranch.visited, 7); Assert.assertNull(sideBranch.after); Assert.assertEquals(forked, nodeMap.get(START_PARALLEL)); Assert.assertEquals(mainBranch, nodeMap.get(exec.getNode("6"))); @@ -433,9 +431,9 @@ public void testEmptyParallel() throws Exception { Assert.assertEquals(9, outputs.size()); } - private Function NODE_TO_ID = input -> input != null ? input.getId() : null; + private final Function NODE_TO_ID = input -> input != null ? input.getId() : null; - private Function CALL_TO_NODE_ID = + private final Function CALL_TO_NODE_ID = input -> input != null && input.getNodeId() != null ? 
input.getNodeId().toString() @@ -521,7 +519,7 @@ public void testSingleNestedParallelBranches() throws Exception { FlowNode echoNode = new DepthFirstScanner().findFirstMatch(b.getExecution(), new NodeStepTypePredicate(EchoStep.DescriptorImpl.byFunctionName("echo"))); Assert.assertNotNull(echoNode); sanityTestIterationAndVisiter(b.getExecution().getCurrentHeads()); - sanityTestIterationAndVisiter(Arrays.asList(echoNode)); + sanityTestIterationAndVisiter(Collections.singletonList(echoNode)); TestVisitor visitor = new TestVisitor(); ForkScanner scanner = new ForkScanner(); @@ -550,7 +548,7 @@ public void testLeastCommonAncestor() throws Exception { Assert.assertArrayEquals(heads.toArray(), start.unvisited.toArray()); // Ensure no issues with single start triggering least common ancestor - heads = new LinkedHashSet<>(Arrays.asList(exec.getNode("4"))); + heads = new LinkedHashSet<>(Collections.singletonList(exec.getNode("4"))); scan.setup(heads); Assert.assertNull(scan.currentParallelStart); Assert.assertTrue(scan.parallelBlockStartStack == null || scan.parallelBlockStartStack.isEmpty()); @@ -710,7 +708,7 @@ public void testSimpleVisitor() throws Exception { FlowExecution exec = this.SIMPLE_PARALLEL_RUN.getExecution(); ForkScanner f = new ForkScanner(); f.setup(exec.getCurrentHeads()); - Assert.assertArrayEquals(new HashSet(exec.getCurrentHeads()).toArray(), new HashSet(f.currentParallelHeads()).toArray()); + Assert.assertArrayEquals(new HashSet<>(exec.getCurrentHeads()).toArray(), new HashSet<>(f.currentParallelHeads()).toArray()); List expectedHeads = f.currentParallelHeads(); sanityTestIterationAndVisiter(exec.getCurrentHeads()); diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunkTest.java b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunkTest.java index 90a7a492..794e12d3 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunkTest.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/MemoryFlowChunkTest.java @@ -27,8 +27,8 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; import org.junit.Test; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; public class MemoryFlowChunkTest { diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/NoOpChunkFinder.java b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/NoOpChunkFinder.java index 1b866d90..2d902d41 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/NoOpChunkFinder.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/NoOpChunkFinder.java @@ -2,8 +2,8 @@ import org.jenkinsci.plugins.workflow.graph.FlowNode; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; /** * For test use: a ChunkFinder that never returns chunks, to use in testing parallel handling only. 
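Since the NoOpChunkFinder in this file deliberately reports no chunks, it may help to contrast it with a finder that does. The sketch below delimits a chunk at every block start and end node; it is only an illustration built against the three ChunkFinder methods visible in this file's hunks (isStartInsideChunk, isChunkStart, isChunkEnd), with an invented class name, and is not part of the change.

import edu.umd.cs.findbugs.annotations.CheckForNull;
import edu.umd.cs.findbugs.annotations.NonNull;
import org.jenkinsci.plugins.workflow.graph.BlockEndNode;
import org.jenkinsci.plugins.workflow.graph.BlockStartNode;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.graphanalysis.ChunkFinder;

/** Hypothetical ChunkFinder that treats each block (for example a stage or parallel branch) as one chunk. */
public class BlockBoundaryChunkFinder implements ChunkFinder {

    @Override
    public boolean isStartInsideChunk() {
        return false; // a chunk only begins once an explicit start node has been seen
    }

    @Override
    public boolean isChunkStart(@NonNull FlowNode current, @CheckForNull FlowNode previous) {
        return current instanceof BlockStartNode;
    }

    @Override
    public boolean isChunkEnd(@NonNull FlowNode current, @CheckForNull FlowNode previous) {
        return current instanceof BlockEndNode;
    }
}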
@@ -16,12 +16,12 @@ public boolean isStartInsideChunk() { } @Override - public boolean isChunkStart(@Nonnull FlowNode current, @CheckForNull FlowNode previous) { + public boolean isChunkStart(@NonNull FlowNode current, @CheckForNull FlowNode previous) { return false; } @Override - public boolean isChunkEnd(@Nonnull FlowNode current, @CheckForNull FlowNode previous) { + public boolean isChunkEnd(@NonNull FlowNode current, @CheckForNull FlowNode previous) { return false; } } diff --git a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/TestVisitor.java b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/TestVisitor.java index dddc3c5e..f71f2325 100644 --- a/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/TestVisitor.java +++ b/src/test/java/org/jenkinsci/plugins/workflow/graphanalysis/TestVisitor.java @@ -4,8 +4,8 @@ import org.junit.Assert; import org.jvnet.hudson.test.Issue; -import javax.annotation.CheckForNull; -import javax.annotation.Nonnull; +import edu.umd.cs.findbugs.annotations.CheckForNull; +import edu.umd.cs.findbugs.annotations.NonNull; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; @@ -59,9 +59,7 @@ public CallEntry(CallType type, FlowNode... nodes) { public CallEntry(CallType type, int... vals) { this.type = type; - for (int i=0; i calls = new ArrayList<>(); @Override - public void chunkStart(@Nonnull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @Nonnull ForkScanner scanner) { + public void chunkStart(@NonNull FlowNode startNode, @CheckForNull FlowNode beforeBlock, @NonNull ForkScanner scanner) { calls.add(new CallEntry(CallType.CHUNK_START, startNode, beforeBlock)); } @Override - public void chunkEnd(@Nonnull FlowNode endNode, @CheckForNull FlowNode afterChunk, @Nonnull ForkScanner scanner) { + public void chunkEnd(@NonNull FlowNode endNode, @CheckForNull FlowNode afterChunk, @NonNull ForkScanner scanner) { calls.add(new CallEntry(CallType.CHUNK_END, endNode, afterChunk)); } @Override - public void parallelStart(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchNode, @Nonnull ForkScanner scanner) { + public void parallelStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchNode, @NonNull ForkScanner scanner) { calls.add(new CallEntry(CallType.PARALLEL_START, parallelStartNode, branchNode)); } @Override - public void parallelEnd(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode parallelEndNode, @Nonnull ForkScanner scanner) { + public void parallelEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode parallelEndNode, @NonNull ForkScanner scanner) { calls.add(new CallEntry(CallType.PARALLEL_END, parallelStartNode, parallelEndNode)); } @Override - public void parallelBranchStart(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchStartNode, @Nonnull ForkScanner scanner) { + public void parallelBranchStart(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchStartNode, @NonNull ForkScanner scanner) { calls.add(new CallEntry(CallType.PARALLEL_BRANCH_START, parallelStartNode, branchStartNode)); } @Override - public void parallelBranchEnd(@Nonnull FlowNode parallelStartNode, @Nonnull FlowNode branchEndNode, @Nonnull ForkScanner scanner) { + public void parallelBranchEnd(@NonNull FlowNode parallelStartNode, @NonNull FlowNode branchEndNode, @NonNull ForkScanner scanner) { calls.add(new CallEntry(CallType.PARALLEL_BRANCH_END, parallelStartNode, branchEndNode)); } @Override - public void atomNode(@CheckForNull FlowNode before, @Nonnull FlowNode atomNode, 
-                         @CheckForNull FlowNode after, @Nonnull ForkScanner scan) {
+    public void atomNode(@CheckForNull FlowNode before, @NonNull FlowNode atomNode, @CheckForNull FlowNode after, @NonNull ForkScanner scan) {
         calls.add(new CallEntry(CallType.ATOM_NODE, before, atomNode, after));
     }

@@ -194,7 +192,7 @@ public List filteredCallsByType(TestVisitor.CallType type

     /** Tests that the rules laid out in {@link SimpleChunkVisitor} javadocs are followed.
      *  Specifically: no atomNode dupes for the same node, no atomNode with a start/end for the same node*/
-    public void assertNoDupes() throws Exception {
+    public void assertNoDupes() {
         // Full equality check
         List entries = new ArrayList<>();
         HashSet visitedAtomNodes = new HashSet<>();
@@ -236,7 +234,7 @@ public void assertNoDupes() throws Exception {

     /** Parallel callback events CANNOT have nulls for the parallel start node */
     @Issue("JENKINS-39841")
-    public void assertNoIllegalNullsInEvents() throws Exception {
+    public void assertNoIllegalNullsInEvents() {
         for (CallEntry ce : calls) {
             Integer id = ce.getNodeId();
             Assert.assertNotNull("Callback with illegally null node: "+ce, id);
@@ -263,7 +261,7 @@ public void assertAllNodesGotChunkEvents(Iterable nodes) {
         }
     }

-    public void assertMatchingParallelBranchStartEnd() throws Exception {
+    public void assertMatchingParallelBranchStartEnd() {
         // Map the parallel start node to the start/end nodes for all branches
         HashMap> branchStartIds = new HashMap<>();
         HashMap> branchEndIds = new HashMap<>();
@@ -308,7 +306,7 @@ public void assertMatchingParallelBranchStartEnd() throws Exception {
     }

     /** Verify that we have balanced start/end for parallels */
-    public void assertMatchingParallelStartEnd() throws Exception {
+    public void assertMatchingParallelStartEnd() {
         // It's like balancing parentheses, starts and ends must be equal

         ArrayDeque openParallelStarts = new ArrayDeque<>();
@@ -334,7 +332,7 @@ public void assertMatchingParallelStartEnd() throws Exception {
             for (Integer parallelStartId : openParallelStarts) {
                 sb.append(parallelStartId).append(',');
             }
-            Assert.fail("Parallel ends with no starts, for parallel(s) with start nodes IDs: "+sb.toString());
+            Assert.fail("Parallel ends with no starts, for parallel(s) with start nodes IDs: " + sb);
         }
     }
 }
diff --git a/src/test/java/org/jenkinsci/plugins/workflow/log/FileLogStorageTest.java b/src/test/java/org/jenkinsci/plugins/workflow/log/FileLogStorageTest.java
index ae92a4fe..c54bb0f5 100644
--- a/src/test/java/org/jenkinsci/plugins/workflow/log/FileLogStorageTest.java
+++ b/src/test/java/org/jenkinsci/plugins/workflow/log/FileLogStorageTest.java
@@ -24,9 +24,10 @@
 package org.jenkinsci.plugins.workflow.log;

+import static org.junit.Assert.assertTrue;
+
 import hudson.model.TaskListener;
 import java.io.File;
-import static org.junit.Assert.*;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
diff --git a/src/test/java/org/jenkinsci/plugins/workflow/log/LogStorageTestBase.java b/src/test/java/org/jenkinsci/plugins/workflow/log/LogStorageTestBase.java
index 6db161e0..69205a5c 100644
--- a/src/test/java/org/jenkinsci/plugins/workflow/log/LogStorageTestBase.java
+++ b/src/test/java/org/jenkinsci/plugins/workflow/log/LogStorageTestBase.java
@@ -24,6 +24,11 @@
 package org.jenkinsci.plugins.workflow.log;

+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.junit.Assert.assertEquals;
+
+import edu.umd.cs.findbugs.annotations.NonNull;
 import hudson.console.AnnotatedLargeText;
 import hudson.console.HyperlinkNote;
 import hudson.model.Action;
@@ -51,7 +56,6 @@
 import org.apache.commons.io.output.NullOutputStream;
 import org.apache.commons.io.output.NullWriter;
 import org.apache.commons.io.output.WriterOutputStream;
-import static org.hamcrest.Matchers.*;
 import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
 import org.jenkinsci.plugins.workflow.flow.FlowExecution;
 import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
@@ -59,7 +63,6 @@
 import org.jenkinsci.plugins.workflow.graph.FlowNode;
 import org.jenkinsci.plugins.workflow.job.WorkflowJob;
 import org.jenkinsci.plugins.workflow.job.WorkflowRun;
-import static org.junit.Assert.*;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
@@ -161,11 +164,11 @@ protected static void close(TaskListener listener) throws Exception {
         logging.capture(100).record(Channel.class, Level.WARNING);
         LogStorage ls = createStorage();
         TaskListener overall = ls.overallListener();
-        overall.getLogger().println("overall from master");
+        overall.getLogger().println("overall from controller");
         TaskListener step = ls.nodeListener(new MockNode("1"));
-        step.getLogger().println("step from master");
-        long overallPos = assertOverallLog(0, "overall from master\nstep from master\n", true);
-        long stepPos = assertStepLog("1", 0, "step from master\n", true);
+        step.getLogger().println("step from controller");
+        long overallPos = assertOverallLog(0, "overall from controller\nstep from controller\n", true);
+        long stepPos = assertStepLog("1", 0, "step from controller\n", true);
         DumbSlave s = r.createOnlineSlave();
         r.showAgentLogs(s, agentLoggers());
         VirtualChannel channel = s.getChannel();
@@ -307,11 +310,11 @@ private long assertLog(Callable> text, long start, String
         return pos;
     }

-    protected final void assertLength(long length) throws Exception {
+    protected final void assertLength(long length) {
         assertLength(text(), length);
     }

-    protected final void assertLength(String id, long length) throws Exception {
+    protected final void assertLength(String id, long length) {
         assertLength(text(id), length);
     }

@@ -319,11 +322,11 @@ private void assertLength(AnnotatedLargeText text, long length) {
         assertEquals(length, text.length());
     }

-    private AnnotatedLargeText text() throws Exception {
+    private AnnotatedLargeText text() {
         return createStorage().overallLog(null, true);
     }

-    private AnnotatedLargeText text(String id) throws Exception {
+    private AnnotatedLargeText text(String id) {
         return createStorage().stepLog(new MockNode(id), true);
     }

@@ -368,6 +371,7 @@ public FlowNode getNode(String id) {
             throw new UnsupportedOperationException();
         }

+        @NonNull
         @Override public Authentication getAuthentication() {
             throw new UnsupportedOperationException();
diff --git a/src/test/java/org/jenkinsci/plugins/workflow/log/SpanCoalescerTest.java b/src/test/java/org/jenkinsci/plugins/workflow/log/SpanCoalescerTest.java
index 48e27676..b000c2a0 100644
--- a/src/test/java/org/jenkinsci/plugins/workflow/log/SpanCoalescerTest.java
+++ b/src/test/java/org/jenkinsci/plugins/workflow/log/SpanCoalescerTest.java
@@ -24,10 +24,11 @@
 package org.jenkinsci.plugins.workflow.log;

+import static org.junit.Assert.assertEquals;
+
 import hudson.console.AnnotatedLargeText;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import static org.junit.Assert.*;
 import org.junit.Test;

 public class SpanCoalescerTest {
diff --git a/src/test/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecoratorOrderTest.java b/src/test/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecoratorOrderTest.java
new file mode 100644
index 00000000..8848b0e8
--- /dev/null
+++ b/src/test/java/org/jenkinsci/plugins/workflow/log/TaskListenerDecoratorOrderTest.java
@@ -0,0 +1,190 @@
+package org.jenkinsci.plugins.workflow.log;
+
+import com.google.common.collect.ImmutableSet;
+import hudson.console.ConsoleLogFilter;
+import hudson.console.LineTransformationOutputStream;
+import hudson.model.AbstractBuild;
+import hudson.model.Node;
+import hudson.model.TaskListener;
+import hudson.remoting.Channel;
+import jenkins.security.MasterToSlaveCallable;
+import jenkins.util.JenkinsJVM;
+import org.jenkinsci.plugins.workflow.cps.CpsFlowDefinition;
+import org.jenkinsci.plugins.workflow.flow.FlowExecutionOwner;
+import org.jenkinsci.plugins.workflow.job.WorkflowJob;
+import org.jenkinsci.plugins.workflow.job.WorkflowRun;
+import org.jenkinsci.plugins.workflow.steps.*;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.jvnet.hudson.test.*;
+import org.kohsuke.stapler.DataBoundConstructor;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.Serializable;
+import java.util.Collections;
+import java.util.Set;
+
+@For(TaskListenerDecorator.class)
+public class TaskListenerDecoratorOrderTest {
+    @ClassRule public static BuildWatcher buildWatcher = new BuildWatcher();
+
+    @Rule public JenkinsRule r = new JenkinsRule();
+
+    @Test public void verifyTaskListenerDecoratorOrder() throws Exception {
+        r.createSlave("remote", null, null);
+        WorkflowJob p = r.createProject(WorkflowJob.class, "p");
+        p.setDefinition(new CpsFlowDefinition("filter {node('remote') {remotePrint()}}", true));
+        WorkflowRun b = r.buildAndAssertSuccess(p);
+        r.assertLogContains("[ApplyOrderDecoratorFactory: job/p/1/] Started", b);
+        // TaskListenerDecorator applied before step decorators, modify stream last. pseudo: new StepDecorator(new TaskListenerDecorator(stream))
+        r.assertLogContains("[ApplyOrderDecoratorFactory: job/p/1/] [StepLevelDecorator] Running on remote in", b);
+        r.assertLogContains("[ApplyOrderDecoratorFactory: job/p/1/ via remote] [StepLevelDecorator via remote] printed a message on master=false", b);
+        // now reverse the order
+        ApplyOrderDecoratorFactory muteOrderDecoratorFactory = r.jenkins.getExtensionList(TaskListenerDecorator.Factory.class).get(ApplyOrderDecoratorFactory.class);
+        muteOrderDecoratorFactory.applyFirst = false;
+        b = r.buildAndAssertSuccess(p);
+        // TaskListenerDecorator applied after step decorators, modify stream first. pseudo: new TaskListenerDecorator(new StepDecorator(stream))
+        r.assertLogContains("[StepLevelDecorator] [ApplyOrderDecoratorFactory: job/p/2/] Running", b);
+        r.assertLogContains("[StepLevelDecorator via remote] [ApplyOrderDecoratorFactory: job/p/2/ via remote] printed a message on master=false", b);
+    }
+
+    private static final class DecoratorImpl extends TaskListenerDecorator {
+        private static final long serialVersionUID = 1L;
+
+        private final String tagName;
+        DecoratorImpl(String tagName) {
+            this.tagName = tagName;
+        }
+        private Object writeReplace() {
+            Channel ch = Channel.current();
+            return ch != null ? new DecoratorImpl(tagName + " via " + ch.getName()) : this;
+        }
+        @Override public OutputStream decorate(OutputStream logger) throws IOException, InterruptedException {
+            return new LineTransformationOutputStream() {
+                @Override
+                protected void eol(byte[] b, int len) throws IOException {
+                    logger.write(("[" + tagName + "] ").getBytes());
+                    logger.write(b, 0, len);
+                }
+                @Override public void close() throws IOException {
+                    super.close();
+                    logger.close();
+                }
+                @Override public void flush() throws IOException {
+                    logger.flush();
+                }
+            };
+        }
+        @Override public String toString() {
+            return "DecoratorImpl[" + tagName + "]";
+        }
+    }
+
+    @TestExtension public static final class ApplyOrderDecoratorFactory implements TaskListenerDecorator.Factory {
+        private boolean applyFirst = true;
+        @Override public TaskListenerDecorator of(FlowExecutionOwner owner) {
+            try {
+                return new DecoratorImpl("ApplyOrderDecoratorFactory: " + owner.getUrl());
+            } catch (IOException x) {
+                throw new AssertionError(x);
+            }
+        }
+        protected void setApplyFirst(boolean applyFirst) {
+            this.applyFirst = applyFirst;
+        }
+        @Override
+        public boolean isAppliedBeforeMainDecorator() {
+            return applyFirst;
+        }
+    }
+
+
+    public static final class FilterStep extends Step {
+        @DataBoundConstructor public FilterStep() {}
+        @Override public StepExecution start(StepContext context) throws Exception {
+            return new Execution(context);
+        }
+        private static final class Execution extends StepExecution {
+            private static final long serialVersionUID = 1L;
+
+            Execution(StepContext context) {
+                super(context);
+            }
+            @Override public boolean start() throws Exception {
+                getContext().newBodyInvoker().withContext(new Filter("StepLevelDecorator")).withCallback(BodyExecutionCallback.wrap(getContext())).start();
+                return false;
+            }
+        }
+        private static final class Filter extends ConsoleLogFilter implements Serializable {
+            private static final long serialVersionUID = 1L;
+
+            private final String message;
+            Filter(String message) {
+                this.message = message;
+            }
+            private Object writeReplace() {
+                Channel ch = Channel.current();
+                return ch != null ? new Filter(message + " via " + ch.getName()) : this;
+            }
+            @SuppressWarnings("rawtypes")
+            @Override public OutputStream decorateLogger(AbstractBuild _ignore, OutputStream logger) throws IOException, InterruptedException {
+                return new DecoratorImpl(message).decorate(logger);
+            }
+            @Override public String toString() {
+                return "Filter[" + message + "]";
+            }
+        }
+        @TestExtension public static final class DescriptorImpl extends StepDescriptor {
+            @Override public Set<? extends Class<?>> getRequiredContext() {
+                return Collections.emptySet();
+            }
+            @Override public String getFunctionName() {
+                return "filter";
+            }
+            @Override public boolean takesImplicitBlockArgument() {
+                return true;
+            }
+        }
+    }
+
+    public static final class RemotePrintStep extends Step {
+        @DataBoundConstructor public RemotePrintStep() {}
+        @Override public StepExecution start(StepContext context) throws Exception {
+            return new Execution(context);
+        }
+        private static final class Execution extends SynchronousNonBlockingStepExecution<Void> {
+            private static final long serialVersionUID = 1L;
+
+            Execution(StepContext context) {
+                super(context);
+            }
+            @Override protected Void run() throws Exception {
+                return getContext().get(Node.class).getChannel().call(new PrintCallable(getContext().get(TaskListener.class)));
+            }
+        }
+        private static final class PrintCallable extends MasterToSlaveCallable<Void, RuntimeException> {
+            private static final long serialVersionUID = 1L;
+
+            private final TaskListener listener;
+            PrintCallable(TaskListener listener) {
+                this.listener = listener;
+            }
+            @Override public Void call() throws RuntimeException {
+                listener.getLogger().println("printed a message on master=" + JenkinsJVM.isJenkinsJVM());
+                listener.getLogger().flush();
+                return null;
+            }
+        }
+        @TestExtension public static final class DescriptorImpl extends StepDescriptor {
+            @Override public Set<? extends Class<?>> getRequiredContext() {
+                return ImmutableSet.of(Node.class, TaskListener.class);
+            }
+            @Override public String getFunctionName() {
+                return "remotePrint";
+            }
+        }
+    }
+
+}