Commit 1c5c8b75 authored by Yelli's avatar Yelli Committed by qiaozhanwei
Browse files

refactor import process (#1804)

* refactor import process

* add refactor import process UT

* add import process UT

* add null check UT for import process metadata

* add UT for import process

* modify dependentparam UT

* modify testAddImportDependentSpecialParam

* modify DependentParamTest

* modify processDefinitionService UT
parent 1a86e7ba
Loading
Loading
Loading
Loading
+4 −4
Original line number Diff line number Diff line
@@ -59,7 +59,7 @@ public class ProcessMeta {
    /**
     * warning group id
     */
    private int scheduleWarningGroupId;
    private Integer scheduleWarningGroupId;

    /**
     * warning group name
@@ -99,7 +99,7 @@ public class ProcessMeta {
    /**
     * worker group id
     */
    private int scheduleWorkerGroupId;
    private Integer scheduleWorkerGroupId;

    /**
     * worker group name
@@ -165,7 +165,7 @@ public class ProcessMeta {
        this.scheduleWarningType = scheduleWarningType;
    }

    public int getScheduleWarningGroupId() {
    public Integer getScheduleWarningGroupId() {
        return scheduleWarningGroupId;
    }

@@ -229,7 +229,7 @@ public class ProcessMeta {
        this.scheduleProcessInstancePriority = scheduleProcessInstancePriority;
    }

    public int getScheduleWorkerGroupId() {
    public Integer getScheduleWorkerGroupId() {
        return scheduleWorkerGroupId;
    }

+155 −215
Original line number Diff line number Diff line
@@ -27,9 +27,10 @@ import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.exportprocess.ProcessAddTaskParam;
import org.apache.dolphinscheduler.api.utils.exportprocess.TaskNodeParamFactory;
import org.apache.dolphinscheduler.api.utils.exportprocess.exportProcessAddTaskParam;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.graph.DAG;
@@ -56,9 +57,7 @@ import org.springframework.web.multipart.MultipartFile;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@@ -97,9 +96,6 @@ public class ProcessDefinitionService extends BaseDAGService {
    @Autowired
    private ProcessDao processDao;

    @Autowired
    private DataSourceMapper dataSourceMapper;

    @Autowired
    private WorkerGroupMapper workerGroupMapper;

@@ -540,7 +536,7 @@ public class ProcessDefinitionService extends BaseDAGService {
     */
    public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
        //correct task param which has data source or dependent param
        String correctProcessDefinitionJson = addTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson());
        String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson());
        processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson);

        //export process metadata
@@ -586,7 +582,7 @@ public class ProcessDefinitionService extends BaseDAGService {
     * @param processDefinitionJson processDefinitionJson
     * @return correct processDefinitionJson
     */
    public String addTaskNodeSpecialParam(String processDefinitionJson) {
    public String addExportTaskNodeSpecialParam(String processDefinitionJson) {
        JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson);
        JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");

@@ -595,9 +591,9 @@ public class ProcessDefinitionService extends BaseDAGService {
            if (StringUtils.isNotEmpty(taskNode.getString("type"))) {
                String taskType = taskNode.getString("type");

                exportProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
                ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
                if (null != addTaskParam) {
                    addTaskParam.addSpecialParam(taskNode);
                    addTaskParam.addExportSpecialParam(taskNode);
                }
            }
        }
@@ -605,24 +601,6 @@ public class ProcessDefinitionService extends BaseDAGService {
        return jsonObject.toString();
    }

    /**
     * check task if has dependent
     * @param taskType task type
     * @return if task has dependent return true else false
     */
    private boolean checkTaskHasDependent(String taskType) {
        return taskType.equals(TaskType.DEPENDENT.name());
    }

    /**
     * check task if has data source info
     * @param taskType task type
     * @return if task has data source return true else false
     */
    private boolean checkTaskHasDataSource(String taskType) {
        return taskType.equals(TaskType.SQL.name())  || taskType.equals(TaskType.PROCEDURE.name());
    }

    /**
     * check task if has sub process
     * @param taskType task type
@@ -642,115 +620,94 @@ public class ProcessDefinitionService extends BaseDAGService {
    @Transactional(rollbackFor = Exception.class)
    public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
        Map<String, Object> result = new HashMap<>(5);
        String processMetaJson = FileUtils.file2String(file);
        ProcessMeta processMeta = JSONUtils.parseObject(processMetaJson, ProcessMeta.class);

        JSONObject json;

        //read workflow json
        try(InputStreamReader inputStreamReader = new InputStreamReader( file.getInputStream(), StandardCharsets.UTF_8)) {
            BufferedReader streamReader = new BufferedReader(inputStreamReader);
            StringBuilder respomseStrBuilder = new StringBuilder();
            String inputStr;

            while ((inputStr = streamReader.readLine())!= null){
                respomseStrBuilder.append( inputStr );
            }

            json = JSONObject.parseObject( respomseStrBuilder.toString() );

            if(null != json){
                String originProjectName = null;
                String processDefinitionName = null;
                String processDefinitionJson = null;
                String processDefinitionDesc = null;
                String processDefinitionLocations = null;
                String processDefinitionConnects = null;

                String scheduleWarningType = null;
                String scheduleWarningGroupId = null;
                String scheduleStartTime = null;
                String scheduleEndTime = null;
                String scheduleCrontab = null;
                String scheduleFailureStrategy = null;
                String scheduleReleaseState = null;
                String scheduleProcessInstancePriority = null;
                String scheduleWorkerGroupId = null;
                String scheduleWorkerGroupName = null;

                if (Objects.nonNull(json.get("projectName"))) {
                    originProjectName = json.get("projectName").toString();
                } else {
                    putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
        //check file content
        if (null == processMeta) {
            putMsg(result, Status.DATA_IS_NULL, "fileContent");
            return result;
        }
                if (Objects.nonNull(json.get("processDefinitionName"))) {
                    processDefinitionName = json.get("processDefinitionName").toString();
                } else {
        if (StringUtils.isEmpty(processMeta.getProjectName())) {
            putMsg(result, Status.DATA_IS_NULL, "projectName");
            return result;
        }
        if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
            putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
            return result;
        }
                if (Objects.nonNull(json.get("processDefinitionJson"))) {
                    processDefinitionJson = json.get("processDefinitionJson").toString();
                } else {
        if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
            putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
            return result;
        }
                if (Objects.nonNull(json.get("processDefinitionDescription"))) {
                    processDefinitionDesc = json.get("processDefinitionDescription").toString();
                }
                if (Objects.nonNull(json.get("processDefinitionLocations"))) {
                    processDefinitionLocations = json.get("processDefinitionLocations").toString();
                }
                if (Objects.nonNull(json.get("processDefinitionConnects"))) {
                    processDefinitionConnects = json.get("processDefinitionConnects").toString();
                }

                //check user access for org project
                Project originProject = projectMapper.queryByName(originProjectName);
                Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, originProject, originProjectName);
                Status resultStatus = (Status) checkResult.get(Constants.STATUS);

                if (resultStatus == Status.SUCCESS) {
        //deal with process name
        String processDefinitionName = processMeta.getProcessDefinitionName();
        //use currentProjectName to query
        Project targetProject = projectMapper.queryByName(currentProjectName);
        if(null != targetProject){
                        processDefinitionName = recursionProcessDefinitionName(targetProject.getId(), processDefinitionName, 1);
            processDefinitionName = recursionProcessDefinitionName(targetProject.getId(),
                    processDefinitionName, 1);
        }

                    JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson);
                    JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
        //add special task param
        String importProcessParam = addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject);

                    for (int j = 0; j < jsonArray.size(); j++) {
                        JSONObject taskNode = jsonArray.getJSONObject(j);
                        String taskType = taskNode.getString("type");
                        if(checkTaskHasDataSource(taskType)) {
                            JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
                            List<DataSource> dataSources = dataSourceMapper.queryDataSourceByName(sqlParameters.getString("datasourceName"));
                            if (!dataSources.isEmpty()) {
                                DataSource dataSource = dataSources.get(0);
                                sqlParameters.put("datasource", dataSource.getId());
                            }
                            taskNode.put("params", sqlParameters);
                        }else if(checkTaskHasDependent(taskType)){
                            JSONObject dependentParameters =  JSONUtils.parseObject(taskNode.getString("dependence"));
                            if(dependentParameters != null){
                                JSONArray dependTaskList = (JSONArray) dependentParameters.get("dependTaskList");
                                for (int h = 0; h < dependTaskList.size(); h++) {
                                    JSONObject dependentTaskModel = dependTaskList.getJSONObject(h);
                                    JSONArray dependItemList = (JSONArray) dependentTaskModel.get("dependItemList");
                                    for (int k = 0; k < dependItemList.size(); k++) {
                                        JSONObject dependentItem = dependItemList.getJSONObject(k);
                                        Project dependentItemProject = projectMapper.queryByName(dependentItem.getString("projectName"));
                                        if(dependentItemProject != null){
                                            ProcessDefinition definition = processDefineMapper.queryByDefineName(dependentItemProject.getId(),dependentItem.getString("definitionName"));
                                            if(definition != null){
                                                dependentItem.put("projectId",dependentItemProject.getId());
                                                dependentItem.put("definitionId",definition.getId());
                                            }
        Map<String, Object> createProcessResult;
        try {
            createProcessResult = createProcessDefinition(loginUser
                    ,currentProjectName,
                    processDefinitionName,
                    importProcessParam,
                    processMeta.getProcessDefinitionDescription(),
                    processMeta.getProcessDefinitionLocations(),
                    processMeta.getProcessDefinitionConnects());
        } catch (JsonProcessingException e) {
            logger.error("import process meta json data: {}", e.getMessage(), e);
            putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
            return result;
        }

        putMsg(result, Status.SUCCESS);
        //create process definition
        Integer processDefinitionId = null;
        if (null != createProcessResult && Objects.nonNull(createProcessResult.get("processDefinitionId"))) {
            processDefinitionId = Integer.parseInt(createProcessResult.get("processDefinitionId").toString());
        }
        //scheduler param
        if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) {
            int scheduleInsert = importProcessSchedule(loginUser,
                    currentProjectName,
                    processMeta,
                    processDefinitionName,
                    processDefinitionId);

            if (0 == scheduleInsert) {
                putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
                return result;
            }
        }
                                taskNode.put("dependence", dependentParameters);

        return result;
    }

    /**
     * import process add special task param
     * @param loginUser login user
     * @param processDefinitionJson process definition json
     * @param targetProject target project
     * @return import process param
     */
    private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) {
        JSONObject jsonObject = JSONUtils.parseObject(processDefinitionJson);
        JSONArray jsonArray = (JSONArray) jsonObject.get("tasks");
        //add sql and dependent param
        for (int i = 0; i < jsonArray.size(); i++) {
            JSONObject taskNode = jsonArray.getJSONObject(i);
            String taskType = taskNode.getString("type");
            ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
            if (null != addTaskParam) {
                addTaskParam.addImportSpecialParam(taskNode);
            }
        }

@@ -761,18 +718,25 @@ public class ProcessDefinitionService extends BaseDAGService {
                .filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).getString("type")))
                .collect(Collectors.toList());

                    if (!subProcessList.isEmpty()) {
        if (CollectionUtils.isNotEmpty(subProcessList)) {
            importSubProcess(loginUser, targetProject, jsonArray, subProcessIdMap);
        }

        jsonObject.put("tasks", jsonArray);

                    Map<String, Object> createProcessDefinitionResult = createProcessDefinition(loginUser,currentProjectName,processDefinitionName,jsonObject.toString(),processDefinitionDesc,processDefinitionLocations,processDefinitionConnects);
                    Integer processDefinitionId = null;
                    if (Objects.nonNull(createProcessDefinitionResult.get("processDefinitionId"))) {
                        processDefinitionId = Integer.parseInt(createProcessDefinitionResult.get("processDefinitionId").toString());
        return jsonObject.toString();
    }
                    if (Objects.nonNull(json.get("scheduleCrontab")) && processDefinitionId != null) {

    /**
     * import process schedule
     * @param loginUser login user
     * @param currentProjectName current project name
     * @param processMeta process meta data
     * @param processDefinitionName process definition name
     * @param processDefinitionId process definition id
     * @return insert schedule flag
     */
    public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta,
                                     String processDefinitionName, Integer processDefinitionId) {
        Date now = new Date();
        Schedule scheduleObj = new Schedule();
        scheduleObj.setProjectName(currentProjectName);
@@ -783,65 +747,41 @@ public class ProcessDefinitionService extends BaseDAGService {
        scheduleObj.setUserId(loginUser.getId());
        scheduleObj.setUserName(loginUser.getUserName());

        scheduleObj.setCrontab(processMeta.getScheduleCrontab());

                        scheduleCrontab = json.get("scheduleCrontab").toString();
                        scheduleObj.setCrontab(scheduleCrontab);
                        if (Objects.nonNull(json.get("scheduleStartTime"))) {
                            scheduleStartTime = json.get("scheduleStartTime").toString();
                            scheduleObj.setStartTime(DateUtils.stringToDate(scheduleStartTime));
        if (null != processMeta.getScheduleStartTime()) {
            scheduleObj.setStartTime(DateUtils.stringToDate(processMeta.getScheduleStartTime()));
        }
                        if (Objects.nonNull(json.get("scheduleEndTime"))) {
                            scheduleEndTime = json.get("scheduleEndTime").toString();
                            scheduleObj.setEndTime(DateUtils.stringToDate(scheduleEndTime));
        if (null != processMeta.getScheduleEndTime()) {
            scheduleObj.setEndTime(DateUtils.stringToDate(processMeta.getScheduleEndTime()));
        }
                        if (Objects.nonNull(json.get("scheduleWarningType"))) {
                            scheduleWarningType = json.get("scheduleWarningType").toString();
                            scheduleObj.setWarningType(WarningType.valueOf(scheduleWarningType));
        if (null != processMeta.getScheduleWarningType()) {
            scheduleObj.setWarningType(WarningType.valueOf(processMeta.getScheduleWarningType()));
        }
                        if (Objects.nonNull(json.get("scheduleWarningGroupId"))) {
                            scheduleWarningGroupId = json.get("scheduleWarningGroupId").toString();
                            scheduleObj.setWarningGroupId(Integer.parseInt(scheduleWarningGroupId));
        if (null != processMeta.getScheduleWarningGroupId()) {
            scheduleObj.setWarningGroupId(processMeta.getScheduleWarningGroupId());
        }
                        if (Objects.nonNull(json.get("scheduleFailureStrategy"))) {
                            scheduleFailureStrategy = json.get("scheduleFailureStrategy").toString();
                            scheduleObj.setFailureStrategy(FailureStrategy.valueOf(scheduleFailureStrategy));
        if (null != processMeta.getScheduleFailureStrategy()) {
            scheduleObj.setFailureStrategy(FailureStrategy.valueOf(processMeta.getScheduleFailureStrategy()));
        }
                        if (Objects.nonNull(json.get("scheduleReleaseState"))) {
                            scheduleReleaseState = json.get("scheduleReleaseState").toString();
                            scheduleObj.setReleaseState(ReleaseState.valueOf(scheduleReleaseState));
        if (null != processMeta.getScheduleReleaseState()) {
            scheduleObj.setReleaseState(ReleaseState.valueOf(processMeta.getScheduleReleaseState()));
        }
                        if (Objects.nonNull(json.get("scheduleProcessInstancePriority"))) {
                            scheduleProcessInstancePriority = json.get("scheduleProcessInstancePriority").toString();
                            scheduleObj.setProcessInstancePriority(Priority.valueOf(scheduleProcessInstancePriority));
        if (null != processMeta.getScheduleProcessInstancePriority()) {
            scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority()));
        }
                        if (Objects.nonNull(json.get("scheduleWorkerGroupId"))) {
                            scheduleWorkerGroupId = json.get("scheduleWorkerGroupId").toString();
                            if(scheduleWorkerGroupId != null){
                                scheduleObj.setWorkerGroupId(Integer.parseInt(scheduleWorkerGroupId));
        if (null != processMeta.getScheduleWorkerGroupId()) {
            scheduleObj.setWorkerGroupId(processMeta.getScheduleWorkerGroupId());
        } else {
                                if (Objects.nonNull(json.get("scheduleWorkerGroupName"))) {
                                    scheduleWorkerGroupName = json.get("scheduleWorkerGroupName").toString();
                                    List<WorkerGroup> workerGroups = workerGroupMapper.queryWorkerGroupByName(scheduleWorkerGroupName);
                                    if(!workerGroups.isEmpty()){
            if (null != processMeta.getScheduleWorkerGroupName()) {
                List<WorkerGroup> workerGroups = workerGroupMapper.queryWorkerGroupByName(processMeta.getScheduleWorkerGroupName());
                if(CollectionUtils.isNotEmpty(workerGroups)){
                    scheduleObj.setWorkerGroupId(workerGroups.get(0).getId());
                }
            }
        }
                        }
                        scheduleMapper.insert(scheduleObj);
                    }

                    putMsg(result, Status.SUCCESS);
                    return result;
                }
            }else{
                putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
                return result;
            }
        } catch (IOException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
        return result;
        return scheduleMapper.insert(scheduleObj);
    }

    /**
@@ -873,7 +813,7 @@ public class ProcessDefinitionService extends BaseDAGService {
                            .filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).getString("type")))
                            .collect(Collectors.toList());

                    if (!subProcessList.isEmpty()) {
                    if (CollectionUtils.isNotEmpty(subProcessList)) {
                        importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap);
                        //sub process processId correct
                        if (!subProcessIdMap.isEmpty()) {
+27 −1
Original line number Diff line number Diff line
@@ -22,9 +22,12 @@ import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.web.multipart.MultipartFile;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -77,4 +80,27 @@ public class FileUtils {
        }
        return null;
    }

    /**
     * Read the content of an uploaded {@link MultipartFile} as a UTF-8 string.
     *
     * NOTE: lines are concatenated WITHOUT their line separators (readLine strips
     * them). Callers feed the result to a JSON parser, where this is harmless,
     * but this method is not suitable for content where newlines matter.
     *
     * @param file MultipartFile file
     * @return file content string; empty (or partial) if reading fails
     */
    public static String file2String(MultipartFile file) {
        StringBuilder strBuilder = new StringBuilder();

        // both readers participate in try-with-resources so neither can leak
        try (InputStreamReader inputStreamReader = new InputStreamReader(file.getInputStream(), StandardCharsets.UTF_8);
             BufferedReader streamReader = new BufferedReader(inputStreamReader)) {
            String inputStr;

            while ((inputStr = streamReader.readLine()) != null) {
                strBuilder.append(inputStr);
            }

        } catch (IOException e) {
            // pass the exception as the last argument so SLF4J logs the stack trace;
            // previously only the file name was logged and the cause was lost
            logger.error("file convert to string failed: {}", file.getName(), e);
        }

        return strBuilder.toString();
    }
}
+21 −2
Original line number Diff line number Diff line
@@ -25,11 +25,13 @@ import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * task node add datasource param strategy
 */
@Service
public class DataSourceParam implements exportProcessAddTaskParam, InitializingBean {
public class DataSourceParam implements ProcessAddTaskParam, InitializingBean {

    @Autowired
    private DataSourceMapper dataSourceMapper;
@@ -40,7 +42,7 @@ public class DataSourceParam implements exportProcessAddTaskParam, InitializingB
     * @return task node json object
     */
    @Override
    public JSONObject addSpecialParam(JSONObject taskNode) {
    public JSONObject addExportSpecialParam(JSONObject taskNode) {
        // add sqlParameters
        JSONObject sqlParameters = JSONUtils.parseObject(taskNode.getString("params"));
        DataSource dataSource = dataSourceMapper.selectById((Integer) sqlParameters.get("datasource"));
@@ -52,6 +54,23 @@ public class DataSourceParam implements exportProcessAddTaskParam, InitializingB
        return taskNode;
    }

    /**
     * On import, resolve the datasource id from the datasource name carried in
     * the task's sql parameters and write it back into the task node.
     * @param taskNode task node json object
     * @return the same task node, with its "params" entry refreshed
     */
    @Override
    public JSONObject addImportSpecialParam(JSONObject taskNode) {
        JSONObject params = JSONUtils.parseObject(taskNode.getString("params"));
        String datasourceName = params.getString("datasourceName");

        List<DataSource> matched = dataSourceMapper.queryDataSourceByName(datasourceName);
        if (!matched.isEmpty()) {
            // first match wins, mirroring export-side lookup behavior
            params.put("datasource", matched.get(0).getId());
        }

        taskNode.put("params", params);
        return taskNode;
    }


    /**
     * put datasource strategy
+37 −2

File changed.

Preview size limit exceeded, changes collapsed.

Loading