Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,176 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.dolphinscheduler.server.master.engine.command.handler;

import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.dolphinscheduler.common.constants.CommandKeyConstants.CMD_PARAM_START_NODES;

import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.WorkflowExecutionStatus;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.TaskDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.WorkflowInstance;
import org.apache.dolphinscheduler.dao.repository.TaskInstanceDao;
import org.apache.dolphinscheduler.dao.repository.WorkflowInstanceDao;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.engine.graph.IWorkflowGraph;
import org.apache.dolphinscheduler.server.master.engine.graph.WorkflowExecutionGraph;
import org.apache.dolphinscheduler.server.master.engine.graph.WorkflowGraphTopologyLogicalVisitor;
import org.apache.dolphinscheduler.server.master.engine.task.runnable.TaskExecutionRunnable;
import org.apache.dolphinscheduler.server.master.engine.task.runnable.TaskExecutionRunnableBuilder;
import org.apache.dolphinscheduler.server.master.runner.WorkflowExecuteContext.WorkflowExecuteContextBuilder;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;

import com.google.common.base.Splitter;

/**
* This handler is used to handle {@link CommandType#EXECUTE_TASK}.
* <p> It will rerun the given start task and all its downstream tasks in the same workflow instance.
*/
@Component
public class ExecuteTaskCommandHandler extends AbstractCommandHandler {

@Autowired

Check warning on line 65 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Remove this field injection and use constructor injection instead.

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8K&open=AZyVnRVlPEdjtM1NhJ8K&pullRequest=18000
private WorkflowInstanceDao workflowInstanceDao;

@Autowired

Check warning on line 68 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Remove this field injection and use constructor injection instead.

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8L&open=AZyVnRVlPEdjtM1NhJ8L&pullRequest=18000
private TaskInstanceDao taskInstanceDao;

Check failure on line 69 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

"taskInstanceDao" is the name of a field in "AbstractCommandHandler".

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8I&open=AZyVnRVlPEdjtM1NhJ8I&pullRequest=18000

@Autowired

Check warning on line 71 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Remove this field injection and use constructor injection instead.

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8M&open=AZyVnRVlPEdjtM1NhJ8M&pullRequest=18000
private ApplicationContext applicationContext;

Check failure on line 72 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

"applicationContext" is the name of a field in "AbstractCommandHandler".

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8J&open=AZyVnRVlPEdjtM1NhJ8J&pullRequest=18000

@Autowired

Check warning on line 74 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Remove this field injection and use constructor injection instead.

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8N&open=AZyVnRVlPEdjtM1NhJ8N&pullRequest=18000
private MasterConfig masterConfig;

@Override
protected void assembleWorkflowInstance(final WorkflowExecuteContextBuilder workflowExecuteContextBuilder) {
final Command command = workflowExecuteContextBuilder.getCommand();
final int workflowInstanceId = command.getWorkflowInstanceId();
final WorkflowInstance workflowInstance = workflowInstanceDao.queryOptionalById(workflowInstanceId)
.orElseThrow(() -> new IllegalArgumentException("Cannot find WorkflowInstance:" + workflowInstanceId));
workflowInstance.setStateWithDesc(WorkflowExecutionStatus.RUNNING_EXECUTION, command.getCommandType().name());
workflowInstance.setCommandType(command.getCommandType());
if (command.getTaskDependType() != null) {

Check warning on line 85 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Remove this use of "getTaskDependType"; it is deprecated.

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8O&open=AZyVnRVlPEdjtM1NhJ8O&pullRequest=18000

Check notice

Code scanning / CodeQL

Deprecated method or constructor invocation Note

Invoking
Command.getTaskDependType
should be avoided because it has been deprecated.
workflowInstance.setTaskDependType(command.getTaskDependType());

Check warning on line 86 in dolphinscheduler-master/src/main/java/org/apache/dolphinscheduler/server/master/engine/command/handler/ExecuteTaskCommandHandler.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Remove this use of "getTaskDependType"; it is deprecated.

See more on https://sonarcloud.io/project/issues?id=apache-dolphinscheduler&issues=AZyVnRVlPEdjtM1NhJ8P&open=AZyVnRVlPEdjtM1NhJ8P&pullRequest=18000

Check notice

Code scanning / CodeQL

Deprecated method or constructor invocation Note

Invoking
Command.getTaskDependType
should be avoided because it has been deprecated.
}
workflowInstance.setHost(masterConfig.getMasterAddress());
workflowInstanceDao.updateById(workflowInstance);
workflowExecuteContextBuilder.setWorkflowInstance(workflowInstance);
}

@Override
protected void assembleWorkflowExecutionGraph(final WorkflowExecuteContextBuilder workflowExecuteContextBuilder) {
final IWorkflowGraph workflowGraph = workflowExecuteContextBuilder.getWorkflowGraph();
final List<String> startNodes = parseStartNodesFromCommand(workflowExecuteContextBuilder, workflowGraph);
final Map<String, TaskInstance> taskInstanceMap = getValidTaskInstance(workflowExecuteContextBuilder
.getWorkflowInstance())
.stream()
.collect(Collectors.toMap(TaskInstance::getName, Function.identity()));

// Mark the selected task and all its downstream task instances as invalid, then trigger them again.
final Set<String> taskNamesNeedRerun = new HashSet<>();
final WorkflowGraphTopologyLogicalVisitor markInvalidTaskVisitor =
WorkflowGraphTopologyLogicalVisitor.builder()
.onWorkflowGraph(workflowGraph)
.taskDependType(workflowExecuteContextBuilder.getWorkflowInstance().getTaskDependType())
.fromTask(startNodes)
.doVisitFunction((task, successors) -> taskNamesNeedRerun.add(task))
.build();
markInvalidTaskVisitor.visit();

final List<TaskInstance> taskInstancesNeedRerun = taskNamesNeedRerun.stream()
.map(taskInstanceMap::remove)
.filter(Objects::nonNull)
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(taskInstancesNeedRerun)) {
taskInstanceDao.markTaskInstanceInvalid(taskInstancesNeedRerun);
}

final WorkflowExecutionGraph workflowExecutionGraph = new WorkflowExecutionGraph();
final BiConsumer<String, Set<String>> taskExecutionRunnableCreator = (task, successors) -> {
final TaskExecutionRunnableBuilder taskExecutionRunnableBuilder =
TaskExecutionRunnableBuilder
.builder()
.workflowExecutionGraph(workflowExecutionGraph)
.workflowDefinition(workflowExecuteContextBuilder.getWorkflowDefinition())
.project(workflowExecuteContextBuilder.getProject())
.workflowInstance(workflowExecuteContextBuilder.getWorkflowInstance())
.taskDefinition(workflowGraph.getTaskNodeByName(task))
.taskInstance(taskInstanceMap.get(task))
.workflowEventBus(workflowExecuteContextBuilder.getWorkflowEventBus())
.applicationContext(applicationContext)
.build();
workflowExecutionGraph.addNode(new TaskExecutionRunnable(taskExecutionRunnableBuilder));
workflowExecutionGraph.addEdge(task, successors);
};

final WorkflowGraphTopologyLogicalVisitor workflowGraphTopologyLogicalVisitor =
WorkflowGraphTopologyLogicalVisitor.builder()
.taskDependType(workflowExecuteContextBuilder.getWorkflowInstance().getTaskDependType())
.onWorkflowGraph(workflowGraph)
.fromTask(startNodes)
.doVisitFunction(taskExecutionRunnableCreator)
.build();
workflowGraphTopologyLogicalVisitor.visit();
workflowExecutionGraph.removeUnReachableEdge();
workflowExecuteContextBuilder.setWorkflowExecutionGraph(workflowExecutionGraph);
}

private List<String> parseStartNodesFromCommand(final WorkflowExecuteContextBuilder workflowExecuteContextBuilder,
final IWorkflowGraph workflowGraph) {
final Command command = workflowExecuteContextBuilder.getCommand();
final String startNodes = JSONUtils.getNodeString(command.getCommandParam(), CMD_PARAM_START_NODES);
checkArgument(StringUtils.isNotBlank(startNodes),
"Invalid command param, the start nodes is empty: " + command.getCommandParam());
final List<Long> startNodeCodes = Splitter.on(',')
.trimResults()
.omitEmptyStrings()
.splitToStream(startNodes)
.map(Long::parseLong)

Check notice

Code scanning / CodeQL

Missing catch of NumberFormatException Note

Potential uncaught 'java.lang.NumberFormatException'.
.collect(Collectors.toList());
checkArgument(CollectionUtils.isNotEmpty(startNodeCodes),
"Invalid command param, cannot parse start nodes from command param: " + command.getCommandParam());
return startNodeCodes
.stream()
.map(workflowGraph::getTaskNodeByCode)
.map(TaskDefinition::getName)
.collect(Collectors.toList());
}

@Override
public CommandType commandType() {
return CommandType.EXECUTE_TASK;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,9 @@
import org.apache.dolphinscheduler.service.expand.CuringParamsService;
import org.apache.dolphinscheduler.service.process.ProcessService;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;

import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
Expand Down Expand Up @@ -87,7 +86,8 @@ public TaskExecutionContext createTaskExecutionContext(TaskExecutionContextCreat
final Project project = request.getProject();

final List<Property> varPools =
generateTaskInstanceVarPool(request.getTaskDefinition(), request.getWorkflowExecutionGraph());
generateTaskInstanceVarPool(workflowInstance, request.getTaskDefinition(),
request.getWorkflowExecutionGraph());
taskInstance.setVarPool(VarPoolUtils.serializeVarPool(varPools));

return TaskExecutionContextBuilder.get()
Expand Down Expand Up @@ -192,19 +192,26 @@ private Optional<String> getEnvironmentConfigFromDB(final TaskInstance taskInsta
return Optional.ofNullable(environmentOptional.get().getConfig());
}

    // The predecessors of the task instance will be used to generate the var pool.
    // In execute-task(TASK_ONLY) scenario, the predecessor might be outside current execution sub-graph.
    // For this case, fallback to workflow varPool to keep compatibility with historical behavior.
    //
    // NOTE(review): reconstructed from a PR diff fragment; the enclosing class is not visible here.
    // Per the review thread, start nodes explicitly take the workflow instance's varPool instead of
    // merging predecessor varPools.
    private List<Property> generateTaskInstanceVarPool(WorkflowInstance workflowInstance,
                                                       TaskDefinition taskDefinition,
                                                       IWorkflowExecutionGraph workflowExecutionGraph) {
        // A task is a start node of the execution graph when its definition code matches one of the
        // graph's start nodes; such a task inherits the workflow instance's varPool directly.
        final boolean isStartNode = workflowExecutionGraph.getStartNodes()
                .stream()
                .anyMatch(node -> node.getTaskDefinition().getCode() == taskDefinition.getCode());
        if (isStartNode) {
            return VarPoolUtils.deserializeVarPool(workflowInstance.getVarPool());
        }
        // Non-start nodes merge the varPools of their initialized predecessors, ordered by task end
        // time (nulls last) so that later-finishing tasks' values win on key collisions — assumes
        // mergeVarPoolJsonString gives later entries precedence; TODO confirm against VarPoolUtils.
        List<String> varPoolsFromPredecessors = workflowExecutionGraph.getPredecessors(taskDefinition.getName())
                .stream()
                .filter(ITaskExecutionRunnable::isTaskInstanceInitialized)
                .map(ITaskExecutionRunnable::getTaskInstance)
                .sorted(Comparator.comparing(TaskInstance::getEndTime, Comparator.nullsLast(Comparator.naturalOrder())))
                .map(TaskInstance::getVarPool)
                .filter(StringUtils::isNotBlank)
                .collect(Collectors.toList());
        return VarPoolUtils.mergeVarPoolJsonString(varPoolsFromPredecessors);
    }
Expand Down
Loading
Loading