Fetching per-task segmented logs in a pipeline

Published 2019-10-22 | Updated 2019-11-23

Scripted pipeline

The script below walks the build's flow graph (currentBuild.getRawBuild().getExecution()), looks for the labelled StepStartNode whose display name matches a stage or parallel-branch name, and assembles the Blue Ocean REST URL for that node's log. Each parallel task can therefore print a link that points only at its own log segment instead of the interleaved console output. Parallel branches are labelled "Branch: <name>" in the flow graph, which is why generateTask looks up "Branch: ${taskName}".

```groovy
import hudson.model.Action
import org.jenkinsci.plugins.workflow.cps.nodes.StepStartNode
import org.jenkinsci.plugins.workflow.graph.FlowNode
import org.jenkinsci.plugins.workflow.actions.LabelAction

// Returns true if the flow node carries a LabelAction, i.e. it is a labelled
// block such as a stage or a parallel branch.
def hasLabelAction(FlowNode flowNode) {
    def actions = flowNode.getActions()
    for (Action action : actions) {
        if (action instanceof LabelAction) {
            return true
        }
    }
    return false
}

// Searches the given flow nodes (and, recursively, their parents) for the
// labelled StepStartNode whose display name equals stepNodeName.
def getStepStartNode(List<FlowNode> flowNodes, String stepNodeName, def depth) {
    if (depth < 0) {
        return null
    }
    for (FlowNode flowNode : flowNodes) {
        def labelActionFlag = false
        if (flowNode instanceof StepStartNode) {
            labelActionFlag = hasLabelAction(flowNode)
        }
        if (labelActionFlag && flowNode.getDisplayName().equals(stepNodeName)) {
            return flowNode
        }
        // Recurse into the parent nodes, decrementing the depth limit.
        def node = getStepStartNode(flowNode.getParents(), stepNodeName, depth - 1)
        if (node) {
            return node
        }
    }
    return null
}

// Builds the Blue Ocean REST URL for the log of the node named stepNodeName.
def getBlueOceanLogUrlByName(String stepNodeName) {
    // currentBuild: class org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
    // build: class org.jenkinsci.plugins.workflow.job.WorkflowRun
    def build = currentBuild.getRawBuild()
    // execution: class org.jenkinsci.plugins.workflow.cps.CpsFlowExecution
    def execution = build.getExecution()
    // executionHeads: class java.util.ArrayList
    def executionHeads = execution.getCurrentHeads()
    def flowNode = getStepStartNode(executionHeads, stepNodeName, 10)
    if (flowNode) {
        return Jenkins.instance.getRootUrl() +
            "blue/rest/organizations/jenkins/pipelines/${JOB_NAME}/runs/${BUILD_NUMBER}/nodes/" +
            flowNode.getId() + "/log"
    }
    return ""
}

// Wraps a task name into a closure that first prints its own Blue Ocean log
// URL and then runs the actual work on a node.
def generateTask(def taskName) {
    def taskBody = {
        println getBlueOceanLogUrlByName("Branch: ${taskName}")
        node {
            println("====> ${taskName} start")
            sleep 3
            println("====> ${taskName} end")
        }
    }
    taskBody
}

// Builds the map passed to the parallel step: branch name -> task closure.
def createParallelTasks(def jobs) {
    def pipelineConfig = [:]
    jobs.each { def job ->
        def taskBody = generateTask(job)
        pipelineConfig.put(job, taskBody)
    }
    pipelineConfig
}

script {
    def jobs = ["job_a", "job_b", "job_c"]
    def pipelineConfig = createParallelTasks(jobs)
    stage('test') {
        println getBlueOceanLogUrlByName("test")
        parallel pipelineConfig
    }
}
```
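The string returned by getBlueOceanLogUrlByName is the Blue Ocean REST log endpoint of a single flow node (blue/rest/organizations/jenkins/pipelines/<job>/runs/<build>/nodes/<nodeId>/log), so one branch's log can be downloaded on its own. Below is a minimal sketch, not part of the original script, of reading such a per-node log from outside the pipeline; the URL, JENKINS_USER and JENKINS_TOKEN are hypothetical placeholders for a reachable Jenkins instance and a user whose API token can read the job.

```groovy
// Minimal sketch (assumption, not from the original post): download the log
// of a single flow node from the Blue Ocean REST endpoint using basic auth
// with an API token. All concrete values below are placeholders.
def fetchNodeLog(String logUrl, String user, String token) {
    def connection = new URL(logUrl).openConnection()
    def auth = "${user}:${token}".toString().bytes.encodeBase64().toString()
    connection.setRequestProperty("Authorization", "Basic ${auth}")
    return connection.inputStream.getText("UTF-8")
}

// Example usage with placeholder values.
def logText = fetchNodeLog(
    "http://jenkins.example.com/blue/rest/organizations/jenkins/pipelines/demo/runs/42/nodes/15/log",
    "JENKINS_USER",
    "JENKINS_TOKEN")
println logText
```

Plain java.net.URL keeps the sketch dependency-free; running curl -u user:token against the same URL works just as well.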