forked from jenkinsci/pipeline-graph-view-plugin
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathPipelineGraphApi.java
More file actions
203 lines (178 loc) · 8.67 KB
/
PipelineGraphApi.java
File metadata and controls
203 lines (178 loc) · 8.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
package io.jenkins.plugins.pipelinegraphview.utils;
import static java.util.Collections.emptyList;

import edu.umd.cs.findbugs.annotations.NonNull;
import hudson.model.Cause;
import hudson.model.CauseAction;
import hudson.model.Item;
import hudson.model.Queue;
import io.jenkins.plugins.pipelinegraphview.Messages;
import io.jenkins.plugins.pipelinegraphview.treescanner.PipelineNodeGraphAdapter;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.jenkinsci.plugins.workflow.actions.WorkspaceAction;
import org.jenkinsci.plugins.workflow.flow.FlowExecution;
import org.jenkinsci.plugins.workflow.graph.FlowNode;
import org.jenkinsci.plugins.workflow.graphanalysis.DepthFirstScanner;
import org.jenkinsci.plugins.workflow.job.WorkflowRun;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Builds the stage-graph representation of a {@link WorkflowRun} for the pipeline graph view,
 * and supports scheduling a replay of the run.
 */
public class PipelineGraphApi {

    private static final Logger logger = LoggerFactory.getLogger(PipelineGraphApi.class);

    /** Shared failure result; the message comes from the localized message bundle. */
    private static final BuildScheduleResult NOT_SCHEDULED =
            new BuildScheduleResult.NotScheduled(Messages.scheduled_failure());

    private final transient WorkflowRun run;

    public PipelineGraphApi(WorkflowRun run) {
        this.run = run;
    }

    /**
     * Re-runs this Pipeline, attributing the new build to the current user.
     *
     * @return the anticipated build number when scheduling succeeded, otherwise {@code null}
     */
    public Integer replay() throws ExecutionException, InterruptedException, TimeoutException {
        // Lambda parameter is 'r' (not 'run') so it does not shadow the field of the same name.
        BuildScheduleResult result = scheduleBuild(r -> {
            CauseAction causeAction = new CauseAction(new Cause.UserIdCause());
            return Queue.getInstance()
                    .schedule2(r.getParent(), 0, causeAction)
                    .getItem();
        });
        // when java 21+ we can use switch expression
        if (result instanceof BuildScheduleResult.Scheduled s) {
            return s.buildNumber();
        }
        return null;
    }

    /**
     * Attempts to schedule a build of this run's job.
     *
     * <p>Returns {@code NotScheduled} when there is no run, the job is not buildable, or the
     * scheduler did not produce a queue item. Requires {@link Item#BUILD} permission.
     *
     * @param scheduler produces the queue item for the run; may return {@code null} on failure
     * @return a {@code Scheduled} result carrying the job's next build number, or a
     *     {@code NotScheduled} result with a localized failure message
     */
    public @NonNull BuildScheduleResult scheduleBuild(Function<WorkflowRun, Queue.Item> scheduler) {
        if (run == null) {
            return NOT_SCHEDULED;
        }
        run.checkPermission(Item.BUILD);
        if (!run.getParent().isBuildable()) {
            return NOT_SCHEDULED;
        }
        Queue.Item item = scheduler.apply(run);
        if (item == null) {
            return NOT_SCHEDULED;
        }
        // NOTE(review): the build number is read after scheduling; under concurrent scheduling
        // it may not be the number of the item just queued — confirm this is acceptable for the UI.
        return new BuildScheduleResult.Scheduled(run.getParent().getNextBuildNumber());
    }

    /** Outcome of {@link #scheduleBuild}: either a build number or a failure message. */
    public sealed interface BuildScheduleResult {
        record NotScheduled(String message) implements BuildScheduleResult {}

        record Scheduled(int buildNumber) implements BuildScheduleResult {
            public String message() {
                return Messages.scheduled_success(buildNumber);
            }
        }
    }

    /** Converts the builder's flow-node wrappers into internal stage representations. */
    private List<PipelineStageInternal> getPipelineNodes(PipelineGraphBuilderApi builder) {
        return builder.getPipelineNodes().stream()
                .map(wrapper -> new PipelineStageInternal(
                        wrapper.getId(), // TODO no need to parse it BO returns a string even though the
                        // datatype is number on the frontend
                        wrapper.getDisplayName(),
                        wrapper.getParents().stream()
                                .map(FlowNodeWrapper::getId)
                                .collect(Collectors.toList()),
                        PipelineState.of(wrapper.getStatus()),
                        wrapper.getType(),
                        wrapper.getDisplayName(), // TODO blue ocean uses timing information: "Passed in 0s"
                        wrapper.isSynthetic(),
                        wrapper.getTiming(),
                        getStageNode(wrapper)))
                .collect(Collectors.toList());
    }

    /**
     * Returns a function that recursively converts a stage id into a {@link PipelineStage},
     * resolving children through {@code stageToChildrenMap}.
     */
    private Function<String, PipelineStage> mapper(
            Map<String, PipelineStageInternal> stageMap, Map<String, List<String>> stageToChildrenMap) {
        String runUrl = run.getUrl();
        return id -> {
            List<String> childIds = stageToChildrenMap.getOrDefault(id, emptyList());
            List<PipelineStage> children =
                    childIds.stream().map(mapper(stageMap, stageToChildrenMap)).collect(Collectors.toList());
            return stageMap.get(id).toPipelineStage(children, runUrl);
        };
    }

    /** Builds the stage tree for this run from the given graph builder. */
    private PipelineGraph createTree(PipelineGraphBuilderApi builder) {
        // We remap children here so we don't update the parents of the original objects -
        // these are completely new representations.
        List<PipelineStageInternal> stages = getPipelineNodes(builder);

        // id => stage; LinkedHashMap keeps the builder's ordering stable in the result.
        Map<String, PipelineStageInternal> stageMap = stages.stream()
                .collect(Collectors.toMap(
                        PipelineStageInternal::getId, stage -> stage, (u, v) -> u, LinkedHashMap::new));

        Map<String, List<String>> stageToChildrenMap = new HashMap<>();
        // Set (was an ArrayList): the contains() filter below made tree building O(n^2).
        Set<String> childNodes = new HashSet<>();
        FlowExecution execution = run.getExecution();
        if (execution == null) {
            // If we don't have an execution - e.g. if the Pipeline has a syntax error -
            // then return an empty graph.
            return new PipelineGraph(new ArrayList<>(), false);
        }
        stages.forEach(stage -> {
            if (stage.getParents().isEmpty()) {
                stageToChildrenMap.put(stage.getId(), new ArrayList<>());
            } else {
                // Only the first parent places a stage in the tree.
                String parentId = stage.getParents().get(0);
                stageToChildrenMap.computeIfAbsent(parentId, k -> new ArrayList<>()).add(stage.getId());
                childNodes.add(stage.getId());
            }
        });
        String runUrl = run.getUrl();
        // Convert every stage, then keep only the roots (stages that are nobody's child).
        List<PipelineStage> stageResults = stageMap.values().stream()
                .map(stage -> {
                    List<PipelineStage> children =
                            stageToChildrenMap.getOrDefault(stage.getId(), emptyList()).stream()
                                    .map(mapper(stageMap, stageToChildrenMap))
                                    .collect(Collectors.toList());
                    return stage.toPipelineStage(children, runUrl);
                })
                .filter(stage -> !childNodes.contains(stage.getId()))
                .collect(Collectors.toList());
        return new PipelineGraph(stageResults, execution.isComplete());
    }

    /**
     * Finds the name of the agent node the given stage ran on, by scanning the execution for
     * nodes carrying a {@link WorkspaceAction} and matching them against the stage's node.
     *
     * @return the agent node name, {@code "built-in"} when the workspace's node name is the
     *     empty string, or {@code null} when no matching workspace node exists
     */
    private static String getStageNode(FlowNodeWrapper flowNodeWrapper) {
        FlowNode flowNode = flowNodeWrapper.getNode();
        DepthFirstScanner scan = new DepthFirstScanner();
        logger.debug("Checking node {}", flowNode);
        FlowExecution execution = flowNode.getExecution();
        for (FlowNode n : scan.allNodes(execution)) {
            WorkspaceAction ws = n.getAction(WorkspaceAction.class);
            if (ws == null) {
                continue;
            }
            logger.debug("Found workspace node: {}", n);
            boolean isWorkspaceNode = Objects.equals(n.getId(), flowNode.getId())
                    || Objects.equals(n.getEnclosingId(), flowNode.getId())
                    || flowNode.getAllEnclosingIds().contains(n.getId());
            // Parallel stages have a sub-stage, so we need to check the 3rd parent for a match
            if (flowNodeWrapper.getType() == FlowNodeWrapper.NodeType.PARALLEL) {
                try {
                    if (n.getEnclosingId() != null) {
                        FlowNode p = execution.getNode(n.getEnclosingId());
                        if (p != null && p.getEnclosingId() != null) {
                            p = execution.getNode(p.getEnclosingId());
                            if (p != null && p.getEnclosingId() != null) {
                                isWorkspaceNode = Objects.equals(flowNode.getId(), p.getEnclosingId());
                            }
                        }
                    }
                } catch (IOException e) {
                    // UncheckedIOException preserves the cause and is still a RuntimeException,
                    // so existing callers catching RuntimeException are unaffected.
                    throw new UncheckedIOException("Failed to resolve enclosing node for " + n.getId(), e);
                }
            }
            if (isWorkspaceNode) {
                logger.debug("Found correct stage node: {}", n.getId());
                String node = ws.getNode();
                if (node.isEmpty()) {
                    // The controller's (built-in) node has the empty string as its name.
                    node = "built-in";
                }
                return node;
            }
        }
        return null;
    }

    /** Builds the stage tree using the default graph adapter. */
    public PipelineGraph createTree() {
        return createTree(new PipelineNodeGraphAdapter(run));
    }
}