status (stringclasses, 1 value) | repo_name (stringclasses, 31 values) | repo_url (stringclasses, 31 values) | issue_id (int64, 1-104k) | title (stringlengths, 4-369) | body (stringlengths, 0-254k, nullable) | issue_url (stringlengths, 37-56) | pull_url (stringlengths, 37-54) | before_fix_sha (stringlengths, 40) | after_fix_sha (stringlengths, 40) | report_datetime (unknown) | language (stringclasses, 5 values) | commit_datetime (unknown) | updated_file (stringlengths, 4-188) | file_content (stringlengths, 0-5.12M)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,464 | [BUG] dependent task failed when conditions task exists | ![image](https://user-images.githubusercontent.com/29528966/79708968-8ba91800-82f3-11ea-8bfb-ee1757bb2d80.png)
description:
1. Workflow A has a conditions task.
2. Workflow B has a dependent task that depends on workflow A.
3. Workflow B always fails.
| https://github.com/apache/dolphinscheduler/issues/2464 | https://github.com/apache/dolphinscheduler/pull/2768 | 7a05c007b45ea742e3ed1a7558c1a641515c140c | 184d64e85291b1e8fb82494bf5a8e2ea755a3dc1 | "2020-04-20T02:48:35Z" | java | "2020-05-21T02:34:28Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import com.baomidou.mybatisplus.annotation.EnumValue;
/**
* running status for workflow and task nodes
*
*/
public enum ExecutionStatus {
/**
* status:
* 0 submit success
* 1 running
* 2 ready pause
* 3 pause
* 4 ready stop
* 5 stop
* 6 failure
* 7 success
* 8 need fault tolerance
* 9 kill
* 10 waiting thread
* 11 waiting depend node complete
*/
SUBMITTED_SUCCESS(0, "submit success"),
RUNNING_EXEUTION(1, "running"),
READY_PAUSE(2, "ready pause"),
PAUSE(3, "pause"),
READY_STOP(4, "ready stop"),
STOP(5, "stop"),
FAILURE(6, "failure"),
SUCCESS(7, "success"),
NEED_FAULT_TOLERANCE(8, "need fault tolerance"),
KILL(9, "kill"),
WAITTING_THREAD(10, "waiting thread"),
WAITTING_DEPEND(11, "waiting depend node complete");
ExecutionStatus(int code, String descp){
this.code = code;
this.descp = descp;
}
@EnumValue
private final int code;
private final String descp;
/**
* status is success
* @return status
*/
public boolean typeIsSuccess(){
return this == SUCCESS;
}
/**
* status is failure
* @return status
*/
public boolean typeIsFailure(){
return this == FAILURE || this == NEED_FAULT_TOLERANCE;
}
/**
* status is finished
* @return status
*/
public boolean typeIsFinished(){
return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause()
|| typeIsWaittingThread();
}
/**
* status is waiting thread
* @return status
*/
public boolean typeIsWaittingThread(){
return this == WAITTING_THREAD;
}
/**
* status is pause
* @return status
*/
public boolean typeIsPause(){
return this == PAUSE;
}
/**
* status is running
* @return status
*/
public boolean typeIsRunning(){
return this == RUNNING_EXEUTION || this == WAITTING_DEPEND;
}
/**
* status is cancel
* @return status
*/
public boolean typeIsCancel(){
return this == KILL || this == STOP ;
}
public int getCode() {
return code;
}
public String getDescp() {
return descp;
}
public static ExecutionStatus of(int status){
for(ExecutionStatus es : values()){
if(es.getCode() == status){
return es;
}
}
throw new IllegalArgumentException("invalid status : " + status);
}
}
|
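The dependent-task code in the rows below keys off the helpers in this enum; note in particular that `typeIsFailure()` is true for both `FAILURE` and `NEED_FAULT_TOLERANCE`, and that every failure counts as finished. A minimal sketch (not part of the repository) exercising the helpers shown above, assuming only this enum on the classpath:

```java
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;

public class ExecutionStatusSketch {
    public static void main(String[] args) {
        // of() resolves a status by its numeric code; 6 is FAILURE, 8 is NEED_FAULT_TOLERANCE
        ExecutionStatus failure = ExecutionStatus.of(6);
        ExecutionStatus tolerance = ExecutionStatus.of(8);

        // both states count as failures, and every failure counts as finished
        System.out.println(failure.typeIsFailure());                            // true
        System.out.println(tolerance.typeIsFailure());                          // true
        System.out.println(failure.typeIsFinished());                           // true
        System.out.println(ExecutionStatus.RUNNING_EXEUTION.typeIsFinished());  // false

        // of() throws IllegalArgumentException for unknown codes
        try {
            ExecutionStatus.of(99);
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());                                 // invalid status : 99
        }
    }
}
```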
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,464 | [BUG] dependent task failed when conditions task exists | ![image](https://user-images.githubusercontent.com/29528966/79708968-8ba91800-82f3-11ea-8bfb-ee1757bb2d80.png)
description:
1. Workflow A has a conditions task.
2. Workflow B has a dependent task that depends on workflow A.
3. Workflow B always fails.
| https://github.com/apache/dolphinscheduler/issues/2464 | https://github.com/apache/dolphinscheduler/pull/2768 | 7a05c007b45ea742e3ed1a7558c1a641515c140c | 184d64e85291b1e8fb82494bf5a8e2ea755a3dc1 | "2020-04-20T02:48:35Z" | java | "2020-05-21T02:34:28Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.runner;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.DependentTaskModel;
import org.apache.dolphinscheduler.common.task.dependent.DependentParameters;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.DependentUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.utils.DependentExecute;
import org.slf4j.LoggerFactory;
import java.util.*;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
public class DependentTaskExecThread extends MasterBaseTaskExecThread {
private DependentParameters dependentParameters;
/**
* dependent task list
*/
private List<DependentExecute> dependentTaskList = new ArrayList<>();
/**
* depend item result map
* save the result to log file
*/
private Map<String, DependResult> dependResultMap = new HashMap<>();
/**
* dependent date
*/
private Date dependentDate;
/**
* constructor of DependentTaskExecThread
*
* @param taskInstance task instance
*/
public DependentTaskExecThread(TaskInstance taskInstance) {
super(taskInstance);
}
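/**
 * submit the dependent task and wait until it completes, then update its state;
 * exceptions are logged and true is returned in all cases
 */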
@Override
public Boolean submitWaitComplete() {
try{
logger.info("dependent task start");
this.taskInstance = submit();
logger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX,
taskInstance.getProcessDefinitionId(),
taskInstance.getProcessInstanceId(),
taskInstance.getId()));
String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, processService.formatTaskAppId(this.taskInstance));
Thread.currentThread().setName(threadLoggerInfoName);
initTaskParameters();
initDependParameters();
waitTaskQuit();
updateTaskState();
}catch (Exception e){
logger.error("dependent task run exception" , e);
}
return true;
}
/**
* init dependent parameters
*/
private void initDependParameters() {
this.dependentParameters = JSONUtils.parseObject(this.taskInstance.getDependency(),
DependentParameters.class);
for(DependentTaskModel taskModel : dependentParameters.getDependTaskList()){
this.dependentTaskList.add(new DependentExecute(
taskModel.getDependItemList(), taskModel.getRelation()));
}
if(this.processInstance.getScheduleTime() != null){
this.dependentDate = this.processInstance.getScheduleTime();
}else{
this.dependentDate = new Date();
}
}
/**
 * update the dependent task state according to the dependent result
 */
private void updateTaskState() {
ExecutionStatus status;
if(this.cancel){
status = ExecutionStatus.KILL;
}else{
DependResult result = getTaskDependResult();
status = (result == DependResult.SUCCESS) ? ExecutionStatus.SUCCESS : ExecutionStatus.FAILURE;
}
taskInstance.setState(status);
taskInstance.setEndTime(new Date());
processService.saveTaskInstance(taskInstance);
}
/**
* wait dependent tasks quit
*/
private Boolean waitTaskQuit() {
logger.info("wait depend task : {} complete", this.taskInstance.getName());
if (taskInstance.getState().typeIsFinished()) {
logger.info("task {} already complete. task state:{}",
this.taskInstance.getName(),
this.taskInstance.getState());
return true;
}
while (Stopper.isRunning()) {
try{
if(this.processInstance == null){
logger.error("process instance not exists , master task exec thread exit");
return true;
}
if(this.cancel || this.processInstance.getState() == ExecutionStatus.READY_STOP){
cancelTaskInstance();
break;
}
if ( allDependentTaskFinish() || taskInstance.getState().typeIsFinished()){
break;
}
// refresh the task instance and process instance from db
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
processInstance = processService.findProcessInstanceById(processInstance.getId());
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
} catch (Exception e) {
logger.error("exception",e);
if (processInstance != null) {
logger.error("wait task quit failed, instance id:{}, task id:{}",
processInstance.getId(), taskInstance.getId());
}
}
}
return true;
}
/**
* cancel dependent task
*/
private void cancelTaskInstance() {
this.cancel = true;
}
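/**
 * init task instance parameters: log path, host, running state and start time
 */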
private void initTaskParameters() {
taskInstance.setLogPath(getTaskLogPath(taskInstance));
taskInstance.setHost(OSUtils.getHost() + Constants.COLON + masterConfig.getListenPort());
taskInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
taskInstance.setStartTime(new Date());
processService.updateTaskInstance(taskInstance);
}
/**
* check whether all dependent tasks have finished
* @return whether all dependent tasks have finished
*/
private boolean allDependentTaskFinish(){
boolean finish = true;
for(DependentExecute dependentExecute : dependentTaskList){
for(Map.Entry<String, DependResult> entry: dependentExecute.getDependResultMap().entrySet()) {
if(!dependResultMap.containsKey(entry.getKey())){
dependResultMap.put(entry.getKey(), entry.getValue());
//save depend result to log
logger.info("dependent item complete {} {},{}",
DEPENDENT_SPLIT, entry.getKey(), entry.getValue());
}
}
if(!dependentExecute.finish(dependentDate)){
finish = false;
}
}
return finish;
}
/**
* get dependent result
* @return DependResult
*/
private DependResult getTaskDependResult(){
List<DependResult> dependResultList = new ArrayList<>();
for(DependentExecute dependentExecute : dependentTaskList){
DependResult dependResult = dependentExecute.getModelDependResult(dependentDate);
dependResultList.add(dependResult);
}
DependResult result = DependentUtils.getDependResultForRelation(
this.dependentParameters.getRelation(), dependResultList
);
logger.info("dependent task completed, dependent result:{}", result);
return result;
}
} |
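For orientation, a hedged sketch (hypothetical helper, not in the repository) isolating the result-to-state mapping that `updateTaskState()` above applies when the dependent thread ends:

```java
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;

final class DependentStateMapping {
    /**
     * mirror of the mapping in DependentTaskExecThread.updateTaskState():
     * a cancelled thread ends as KILL; otherwise only DependResult.SUCCESS
     * maps to SUCCESS and everything else (e.g. WAITING, FAILED) ends as FAILURE
     */
    static ExecutionStatus toTaskState(boolean cancelled, DependResult result) {
        if (cancelled) {
            return ExecutionStatus.KILL;
        }
        return (result == DependResult.SUCCESS)
                ? ExecutionStatus.SUCCESS
                : ExecutionStatus.FAILURE;
    }
}
```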
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,464 | [BUG] dependent task failed when conditions task exists | ![image](https://user-images.githubusercontent.com/29528966/79708968-8ba91800-82f3-11ea-8bfb-ee1757bb2d80.png)
description:
1. Workflow A has a conditions task.
2. Workflow B has a dependent task that depends on workflow A.
3. Workflow B always fails.
| https://github.com/apache/dolphinscheduler/issues/2464 | https://github.com/apache/dolphinscheduler/pull/2768 | 7a05c007b45ea742e3ed1a7558c1a641515c140c | 184d64e85291b1e8fb82494bf5a8e2ea755a3dc1 | "2020-04-20T02:48:35Z" | java | "2020-05-21T02:34:28Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.runner;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.utils.AlertManager;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* master exec thread, splits the dag
*/
public class MasterExecThread implements Runnable {
/**
* logger of MasterExecThread
*/
private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class);
/**
* process instance
*/
private ProcessInstance processInstance;
/**
* running TaskNode
*/
private final Map<MasterBaseTaskExecThread,Future<Boolean>> activeTaskNode = new ConcurrentHashMap<>();
/**
* task exec service
*/
private final ExecutorService taskExecService;
/**
* submit failure nodes
*/
private boolean taskFailedSubmit = false;
/**
* recover node id list
*/
private List<TaskInstance> recoverNodeIdList = new ArrayList<>();
/**
* error task list
*/
private Map<String,TaskInstance> errorTaskList = new ConcurrentHashMap<>();
/**
* complete task list
*/
private Map<String, TaskInstance> completeTaskList = new ConcurrentHashMap<>();
/**
* ready to submit task list
*/
private Map<String, TaskInstance> readyToSubmitTaskList = new ConcurrentHashMap<>();
/**
* depend failed task map
*/
private Map<String, TaskInstance> dependFailedTask = new ConcurrentHashMap<>();
/**
* forbidden task map
*/
private Map<String, TaskNode> forbiddenTaskList = new ConcurrentHashMap<>();
/**
* skip task map
*/
private Map<String, TaskNode> skipTaskNodeList = new ConcurrentHashMap<>();
/**
* recover tolerance fault task list
*/
private List<TaskInstance> recoverToleranceFaultTaskList = new ArrayList<>();
/**
* alert manager
*/
private AlertManager alertManager = new AlertManager();
/**
* the object of DAG
*/
private DAG<String,TaskNode,TaskNodeRelation> dag;
/**
* process service
*/
private ProcessService processService;
/**
* master config
*/
private MasterConfig masterConfig;
/**
 * netty remoting client
 */
private NettyRemotingClient nettyRemotingClient;
/**
* constructor of MasterExecThread
* @param processInstance processInstance
* @param processService processService
* @param nettyRemotingClient nettyRemotingClient
*/
public MasterExecThread(ProcessInstance processInstance, ProcessService processService, NettyRemotingClient nettyRemotingClient){
this.processService = processService;
this.processInstance = processInstance;
this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class);
int masterTaskExecNum = masterConfig.getMasterExecTaskNum();
this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread",
masterTaskExecNum);
this.nettyRemotingClient = nettyRemotingClient;
}
@Override
public void run() {
// process instance is null
if (processInstance == null){
logger.info("process instance is not exists");
return;
}
// check to see if it's done
if (processInstance.getState().typeIsFinished()){
logger.info("process instance is done : {}",processInstance.getId());
return;
}
try {
if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()){
// sub process complement data
executeComplementProcess();
}else{
// execute flow
executeProcess();
}
}catch (Exception e){
logger.error("master exec thread exception", e);
logger.error("process execute failed, process id:{}", processInstance.getId());
processInstance.setState(ExecutionStatus.FAILURE);
processInstance.setEndTime(new Date());
processService.updateProcessInstance(processInstance);
}finally {
taskExecService.shutdown();
// post handle
postHandle();
}
}
/**
* execute process
* @throws Exception exception
*/
private void executeProcess() throws Exception {
prepareProcess();
runProcess();
endProcess();
}
/**
* execute complement process
* @throws Exception exception
*/
private void executeComplementProcess() throws Exception {
Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam());
Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
processService.saveProcessInstance(processInstance);
// get schedules
int processDefinitionId = processInstance.getProcessDefinitionId();
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
List<Date> listDate = Lists.newLinkedList();
if(!CollectionUtils.isEmpty(schedules)){
for (Schedule schedule : schedules) {
listDate.addAll(CronUtils.getSelfFireDateList(startDate, endDate, schedule.getCrontab()));
}
}
// get first fire date
Iterator<Date> iterator = null;
Date scheduleDate = null;
if(!CollectionUtils.isEmpty(listDate)) {
iterator = listDate.iterator();
scheduleDate = iterator.next();
processInstance.setScheduleTime(scheduleDate);
processService.updateProcessInstance(processInstance);
}else{
scheduleDate = processInstance.getScheduleTime();
if(scheduleDate == null){
scheduleDate = startDate;
}
}
while(Stopper.isRunning()){
// prepare dag and other info
prepareProcess();
if(dag == null){
logger.error("process {} dag is null, please check out parameters",
processInstance.getId());
processInstance.setState(ExecutionStatus.SUCCESS);
processService.updateProcessInstance(processInstance);
return;
}
// execute the process and wait for it to end
runProcess();
// process instance failed, no more complement runs
if(!processInstance.getState().typeIsSuccess()){
logger.info("process {} state {}, complement not completely!",
processInstance.getId(), processInstance.getState());
break;
}
// current process instance succeeded, execute the next one
if(null == iterator){
// loop by day
scheduleDate = DateUtils.getSomeDay(scheduleDate, 1);
if(scheduleDate.after(endDate)){
// all success
logger.info("process {} complement completely!", processInstance.getId());
break;
}
}else{
// loop by schedule date
if(!iterator.hasNext()){
// all success
logger.info("process {} complement completely!", processInstance.getId());
break;
}
scheduleDate = iterator.next();
}
logger.info("process {} start to complement {} data",
processInstance.getId(), DateUtils.dateToString(scheduleDate));
// execute next process instance complement data
processInstance.setScheduleTime(scheduleDate);
if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
}
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
taskInstance.setFlag(Flag.NO);
processService.updateTaskInstance(taskInstance);
}
processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processInstance.getProcessDefinition().getGlobalParamMap(),
processInstance.getProcessDefinition().getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
processService.saveProcessInstance(processInstance);
}
// flow end
endProcess();
}
/**
* prepare process parameter
* @throws Exception exception
*/
private void prepareProcess() throws Exception {
// init task queue
initTaskQueue();
// gen process dag
buildFlowDag();
logger.info("prepare process :{} end", processInstance.getId());
}
/**
* process end handle
*/
private void endProcess() {
processInstance.setEndTime(new Date());
processService.updateProcessInstance(processInstance);
if(processInstance.getState().typeIsWaittingThread()){
processService.createRecoveryWaitingThreadCommand(null, processInstance);
}
List<TaskInstance> taskInstances = processService.findValidTaskListByProcessId(processInstance.getId());
alertManager.sendAlertProcessInstance(processInstance, taskInstances);
}
/**
* generate process dag
* @throws Exception exception
*/
private void buildFlowDag() throws Exception {
recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam());
forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson());
// generate process to get DAG info
List<String> recoveryNameList = getRecoveryNodeNameList();
List<String> startNodeNameList = parseStartNodeName(processInstance.getCommandParam());
ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(),
startNodeNameList, recoveryNameList, processInstance.getTaskDependType());
if(processDag == null){
logger.error("processDag is null");
return;
}
// generate process dag
dag = DagHelper.buildDagGraph(processDag);
}
/**
* init task queue
*/
private void initTaskQueue(){
taskFailedSubmit = false;
activeTaskNode.clear();
dependFailedTask.clear();
completeTaskList.clear();
errorTaskList.clear();
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance task : taskInstanceList){
if(task.isTaskComplete()){
completeTaskList.put(task.getName(), task);
}
if(task.getState().typeIsFailure() && !task.taskCanRetry()){
errorTaskList.put(task.getName(), task);
}
}
}
/**
* process post handle
*/
private void postHandle() {
logger.info("develop mode is: {}", CommonUtils.isDevelopMode());
if (!CommonUtils.isDevelopMode()) {
// get exec dir
String execLocalPath = org.apache.dolphinscheduler.common.utils.FileUtils
.getProcessExecDir(processInstance.getProcessDefinition().getProjectId(),
processInstance.getProcessDefinitionId(),
processInstance.getId());
try {
FileUtils.deleteDirectory(new File(execLocalPath));
} catch (IOException e) {
logger.error("delete exec dir failed ", e);
}
}
}
/**
* submit task to execute
* @param taskInstance task instance
* @return TaskInstance
*/
private TaskInstance submitTaskExec(TaskInstance taskInstance) {
MasterBaseTaskExecThread abstractExecThread = null;
if(taskInstance.isSubProcess()){
abstractExecThread = new SubProcessTaskExecThread(taskInstance);
}else if(taskInstance.isDependTask()){
abstractExecThread = new DependentTaskExecThread(taskInstance);
}else if(taskInstance.isConditionsTask()){
abstractExecThread = new ConditionsTaskExecThread(taskInstance);
}else {
abstractExecThread = new MasterTaskExecThread(taskInstance);
}
Future<Boolean> future = taskExecService.submit(abstractExecThread);
activeTaskNode.putIfAbsent(abstractExecThread, future);
return abstractExecThread.getTaskInstance();
}
/**
* find task instance in db.
* in case more than one task with the same name is submitted at the same time.
* @param taskName task name
* @return TaskInstance
*/
private TaskInstance findTaskIfExists(String taskName){
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
if(taskInstance.getName().equals(taskName)){
return taskInstance;
}
}
return null;
}
/**
* create a task instance for the given node
* @param processInstance process instance
* @param nodeName node name
* @return TaskInstance
*/
private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName,
TaskNode taskNode) {
TaskInstance taskInstance = findTaskIfExists(nodeName);
if(taskInstance == null){
taskInstance = new TaskInstance();
// task name
taskInstance.setName(nodeName);
// process instance define id
taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId());
// task instance state
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
// process instance id
taskInstance.setProcessInstanceId(processInstance.getId());
// task instance node json
taskInstance.setTaskJson(JSON.toJSONString(taskNode));
// task instance type
taskInstance.setTaskType(taskNode.getType());
// task instance whether alert
taskInstance.setAlertFlag(Flag.NO);
// task instance start time
taskInstance.setStartTime(new Date());
// task instance flag
taskInstance.setFlag(Flag.YES);
// task instance retry times
taskInstance.setRetryTimes(0);
// max task instance retry times
taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes());
// retry task instance interval
taskInstance.setRetryInterval(taskNode.getRetryInterval());
// task instance priority
if(taskNode.getTaskInstancePriority() == null){
taskInstance.setTaskInstancePriority(Priority.MEDIUM);
}else{
taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority());
}
String processWorkerGroup = processInstance.getWorkerGroup();
processWorkerGroup = StringUtils.isBlank(processWorkerGroup) ? DEFAULT_WORKER_GROUP : processWorkerGroup;
String taskWorkerGroup = StringUtils.isBlank(taskNode.getWorkerGroup()) ? processWorkerGroup : taskNode.getWorkerGroup();
if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) {
taskInstance.setWorkerGroup(processWorkerGroup);
}else {
taskInstance.setWorkerGroup(taskWorkerGroup);
}
}
return taskInstance;
}
/**
* if all of the task's dependencies are skipped, skip it too.
* @param taskNode task node
* @return whether the task node should be skipped
*/
private boolean isTaskNodeNeedSkip(TaskNode taskNode){
if(CollectionUtils.isEmpty(taskNode.getDepList())){
return false;
}
for(String depNode : taskNode.getDepList()){
if(!skipTaskNodeList.containsKey(depNode)){
return false;
}
}
return true;
}
/**
* mark task nodes as skipped when all of their dependencies are skipped
* @param taskNodesSkipList list of task node names to skip
*/
private void setTaskNodeSkip(List<String> taskNodesSkipList){
for(String skipNode : taskNodesSkipList){
skipTaskNodeList.putIfAbsent(skipNode, dag.getNode(skipNode));
Collection<String> postNodeList = DagHelper.getStartVertex(skipNode, dag, completeTaskList);
List<String> postSkipList = new ArrayList<>();
for(String post : postNodeList){
TaskNode postNode = dag.getNode(post);
if(isTaskNodeNeedSkip(postNode)){
postSkipList.add(post);
}
}
setTaskNodeSkip(postSkipList);
}
}
/**
* parse a conditions task to find the branch to run,
* and set the skip flag for the other branch.
* @param nodeName node name
* @return the list of nodes on the branch to run
*/
private List<String> parseConditionTask(String nodeName){
List<String> conditionTaskList = new ArrayList<>();
TaskNode taskNode = dag.getNode(nodeName);
if(!taskNode.isConditionsTask()){
return conditionTaskList;
}
ConditionsParameters conditionsParameters =
JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class);
TaskInstance taskInstance = completeTaskList.get(nodeName);
if(taskInstance == null){
logger.error("task instance {} cannot find, please check it!", nodeName);
return conditionTaskList;
}
if(taskInstance.getState().typeIsSuccess()){
conditionTaskList = conditionsParameters.getSuccessNode();
setTaskNodeSkip(conditionsParameters.getFailedNode());
}else if(taskInstance.getState().typeIsFailure()){
conditionTaskList = conditionsParameters.getFailedNode();
setTaskNodeSkip(conditionsParameters.getSuccessNode());
}else{
conditionTaskList.add(nodeName);
}
return conditionTaskList;
}
/**
* parse the post node list of the previous node:
* if it is a conditions node, return the branch according to its settings;
* if a post node has completed, return the post nodes of that completed node
* @param previousNodeName previous node name
* @return post node name list
*/
private List<String> parsePostNodeList(String previousNodeName){
List<String> postNodeList = new ArrayList<>();
TaskNode taskNode = dag.getNode(previousNodeName);
if(taskNode != null && taskNode.isConditionsTask()){
return parseConditionTask(previousNodeName);
}
Collection<String> postNodeCollection = DagHelper.getStartVertex(previousNodeName, dag, completeTaskList);
List<String> postSkipList = new ArrayList<>();
// skip completed success nodes and parse their post nodes
// if it is a conditions node:
// 1. parse the branch to run according to the conditions settings
// 2. set the skip flag on the other branch
for(String postNode : postNodeCollection){
if(completeTaskList.containsKey(postNode)){
TaskInstance postTaskInstance = completeTaskList.get(postNode);
if(dag.getNode(postNode).isConditionsTask()){
List<String> conditionTaskNodeList = parseConditionTask(postNode);
for(String conditions : conditionTaskNodeList){
postNodeList.addAll(parsePostNodeList(conditions));
}
}else if(postTaskInstance.getState().typeIsSuccess()){
postNodeList.addAll(parsePostNodeList(postNode));
}else{
postNodeList.add(postNode);
}
}else if(isTaskNodeNeedSkip(dag.getNode(postNode))){
postSkipList.add(postNode);
setTaskNodeSkip(postSkipList);
postSkipList.clear();
}else{
postNodeList.add(postNode);
}
}
return postNodeList;
}
/**
* submit post node
* @param parentNodeName parent node name
*/
private void submitPostNode(String parentNodeName){
List<String> submitTaskNodeList = parsePostNodeList(parentNodeName);
List<TaskInstance> taskInstances = new ArrayList<>();
for(String taskNode : submitTaskNodeList){
taskInstances.add(createTaskInstance(processInstance, taskNode,
dag.getNode(taskNode)));
}
// if the previous node succeeded, submit the post nodes
for(TaskInstance task : taskInstances){
if(readyToSubmitTaskList.containsKey(task.getName())){
continue;
}
if(completeTaskList.containsKey(task.getName())){
logger.info("task {} has already run success", task.getName());
continue;
}
if(task.getState().typeIsPause() || task.getState().typeIsCancel()){
logger.info("task {} stopped, the state is {}", task.getName(), task.getState());
}else{
addTaskToStandByList(task);
}
}
}
/**
* determine whether the dependencies of the task node are complete
* @return DependResult
*/
private DependResult isTaskDepsComplete(String taskName) {
Collection<String> startNodes = dag.getBeginNode();
// if it is a start vertex, return success directly
if(startNodes.contains(taskName)){
return DependResult.SUCCESS;
}
TaskNode taskNode = dag.getNode(taskName);
List<String> depNameList = taskNode.getDepList();
for(String depsNode : depNameList ){
if(!dag.containsNode(depsNode)
|| forbiddenTaskList.containsKey(depsNode)
|| skipTaskNodeList.containsKey(depsNode)){
continue;
}
// dependencies must be fully completed
if(!completeTaskList.containsKey(depsNode)){
return DependResult.WAITING;
}
ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState();
// conditions task would not return failed.
if(depTaskState.typeIsFailure()
&& !DagHelper.haveConditionsAfterNode(depsNode, dag )
&& !dag.getNode(depsNode).isConditionsTask()){
return DependResult.FAILED;
}
if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){
return DependResult.WAITING;
}
}
logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray()));
return DependResult.SUCCESS;
}
/**
* query task instance by complete state
* @param state state
* @return task instance list
*/
private List<TaskInstance> getCompleteTaskByState(ExecutionStatus state){
List<TaskInstance> resultList = new ArrayList<>();
for (Map.Entry<String, TaskInstance> entry: completeTaskList.entrySet()) {
if(entry.getValue().getState() == state){
resultList.add(entry.getValue());
}
}
return resultList;
}
/**
* whether there are ongoing tasks
* @param state state
* @return ExecutionStatus
*/
private ExecutionStatus runningState(ExecutionStatus state){
if(state == ExecutionStatus.READY_STOP ||
state == ExecutionStatus.READY_PAUSE ||
state == ExecutionStatus.WAITTING_THREAD){
// if the running task is not completed, the state remains unchanged
return state;
}else{
return ExecutionStatus.RUNNING_EXEUTION;
}
}
/**
* whether a failed task exists, including submit failures, dependency failures and execution failures (after retries)
*
* @return Boolean whether has failed task
*/
private boolean hasFailedTask(){
if(this.taskFailedSubmit){
return true;
}
if(this.errorTaskList.size() > 0){
return true;
}
return this.dependFailedTask.size() > 0;
}
/**
* process instance failure
*
* @return Boolean whether process instance failed
*/
private boolean processFailed(){
if(hasFailedTask()) {
if(processInstance.getFailureStrategy() == FailureStrategy.END){
return true;
}
if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) {
return readyToSubmitTaskList.size() == 0 || activeTaskNode.size() == 0;
}
}
return false;
}
/**
* whether task for waiting thread
* @return Boolean whether has waiting thread task
*/
private boolean hasWaitingThreadTask(){
List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD);
return CollectionUtils.isNotEmpty(waitingList);
}
/**
* prepare for pause
* 1. a failed retry task exists in the preparation queue: return failure directly
* 2. a paused task exists, the complement is not completed, or tasks are pending submission: return pause
* 3,success
* @return ExecutionStatus
*/
private ExecutionStatus processReadyPause(){
if(hasRetryTaskInStandBy()){
return ExecutionStatus.FAILURE;
}
List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE);
if(CollectionUtils.isNotEmpty(pauseList)
|| !isComplementEnd()
|| readyToSubmitTaskList.size() > 0){
return ExecutionStatus.PAUSE;
}else{
return ExecutionStatus.SUCCESS;
}
}
/**
* generate the latest process instance status by the tasks state
* @return process instance execution status
*/
private ExecutionStatus getProcessInstanceState(){
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
ExecutionStatus state = instance.getState();
if(activeTaskNode.size() > 0 || retryTaskExists()){
// active task and retry task exists
return runningState(state);
}
// process failure
if(processFailed()){
return ExecutionStatus.FAILURE;
}
// waiting thread
if(hasWaitingThreadTask()){
return ExecutionStatus.WAITTING_THREAD;
}
// pause
if(state == ExecutionStatus.READY_PAUSE){
return processReadyPause();
}
// stop
if(state == ExecutionStatus.READY_STOP){
List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP);
List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL);
if(CollectionUtils.isNotEmpty(stopList)
|| CollectionUtils.isNotEmpty(killList)
|| !isComplementEnd()){
return ExecutionStatus.STOP;
}else{
return ExecutionStatus.SUCCESS;
}
}
// success
if(state == ExecutionStatus.RUNNING_EXEUTION){
List<TaskInstance> killTasks = getCompleteTaskByState(ExecutionStatus.KILL);
if(readyToSubmitTaskList.size() > 0){
// tasks are pending submission with no retries, indicating a dependency is still waiting to complete
return ExecutionStatus.RUNNING_EXEUTION;
}else if(CollectionUtils.isNotEmpty(killTasks)){
// tasks maybe killed manually
return ExecutionStatus.FAILURE;
}else{
// if the waiting queue is empty and the status is in progress, then success
return ExecutionStatus.SUCCESS;
}
}
return state;
}
/**
* whether the standby task list contains retry tasks
* @return whether a retry task exists
*/
private boolean retryTaskExists() {
boolean result = false;
for(String taskName : readyToSubmitTaskList.keySet()){
TaskInstance task = readyToSubmitTaskList.get(taskName);
if(task.getState().typeIsFailure()){
result = true;
break;
}
}
return result;
}
/**
* whether complement end
* @return Boolean whether is complement end
*/
private boolean isComplementEnd() {
if(!processInstance.isComplementData()){
return true;
}
try {
Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam());
Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
return processInstance.getScheduleTime().equals(endTime);
} catch (Exception e) {
logger.error("complement end failed ",e);
return false;
}
}
/**
* update process instance state
* after each batch of tasks is executed, the status of the process instance is updated
*/
private void updateProcessInstanceState() {
ExecutionStatus state = getProcessInstanceState();
if(processInstance.getState() != state){
logger.info(
"work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}",
processInstance.getId(), processInstance.getName(),
processInstance.getState(), state,
processInstance.getCommandType());
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
instance.setState(state);
instance.setProcessDefinition(processInstance.getProcessDefinition());
processService.updateProcessInstance(instance);
processInstance = instance;
}
}
/**
* get task dependency result
* @param taskInstance task instance
* @return DependResult
*/
private DependResult getDependResultForTask(TaskInstance taskInstance){
return isTaskDepsComplete(taskInstance.getName());
}
/**
* add task to standby list
* @param taskInstance task instance
*/
private void addTaskToStandByList(TaskInstance taskInstance){
logger.info("add task to stand by list: {}", taskInstance.getName());
readyToSubmitTaskList.putIfAbsent(taskInstance.getName(), taskInstance);
}
/**
* remove task from stand by list
* @param taskInstance task instance
*/
private void removeTaskFromStandbyList(TaskInstance taskInstance){
logger.info("remove task from stand by list: {}", taskInstance.getName());
readyToSubmitTaskList.remove(taskInstance.getName());
}
/**
* has retry task in standby
* @return Boolean whether has retry task in standby
*/
private boolean hasRetryTaskInStandBy(){
for (Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) {
if(entry.getValue().getState().typeIsFailure()){
return true;
}
}
return false;
}
/**
* submit and watch the tasks, until the work flow stop
*/
private void runProcess(){
// submit start node
submitPostNode(null);
boolean sendTimeWarning = false;
while(!processInstance.isProcessInstanceStop()){
// send warning email if process time out.
if(!sendTimeWarning && checkProcessTimeOut(processInstance) ){
alertManager.sendProcessTimeoutAlert(processInstance,
processService.findProcessDefineById(processInstance.getProcessDefinitionId()));
sendTimeWarning = true;
}
for(Map.Entry<MasterBaseTaskExecThread,Future<Boolean>> entry: activeTaskNode.entrySet()) {
Future<Boolean> future = entry.getValue();
TaskInstance task = entry.getKey().getTaskInstance();
if(!future.isDone()){
continue;
}
// node monitor thread complete
task = this.processService.findTaskInstanceById(task.getId());
if(task == null){
this.taskFailedSubmit = true;
activeTaskNode.remove(entry.getKey());
continue;
}
// node monitor thread complete
if(task.getState().typeIsFinished()){
activeTaskNode.remove(entry.getKey());
}
logger.info("task :{}, id:{} complete, state is {} ",
task.getName(), task.getId(), task.getState());
// node success , post node submit
if(task.getState() == ExecutionStatus.SUCCESS){
completeTaskList.put(task.getName(), task);
submitPostNode(task.getName());
continue;
}
// node fails, retry first, and then execute the failure process
if(task.getState().typeIsFailure()){
if(task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){
this.recoverToleranceFaultTaskList.add(task);
}
if(task.taskCanRetry()){
addTaskToStandByList(task);
}else{
completeTaskList.put(task.getName(), task);
if( task.isConditionsTask()
|| DagHelper.haveConditionsAfterNode(task.getName(), dag)) {
submitPostNode(task.getName());
}else{
errorTaskList.put(task.getName(), task);
if(processInstance.getFailureStrategy() == FailureStrategy.END){
killTheOtherTasks();
}
}
}
continue;
}
// other status stop/pause
completeTaskList.put(task.getName(), task);
}
// send alert
if(CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)){
alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList);
this.recoverToleranceFaultTaskList.clear();
}
// updateProcessInstance completed task status
// failure priority is higher than pause
// if a task fails, other paused tasks need to be reset to kill
if(errorTaskList.size() > 0){
for(Map.Entry<String, TaskInstance> entry: completeTaskList.entrySet()) {
TaskInstance completeTask = entry.getValue();
if(completeTask.getState()== ExecutionStatus.PAUSE){
completeTask.setState(ExecutionStatus.KILL);
completeTaskList.put(entry.getKey(), completeTask);
processService.updateTaskInstance(completeTask);
}
}
}
if(canSubmitTaskToQueue()){
submitStandByTask();
}
try {
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
} catch (InterruptedException e) {
logger.error(e.getMessage(),e);
}
updateProcessInstanceState();
}
logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState());
}
/**
* check whether the process instance has timed out
* @param processInstance process instance
* @return true if the running time exceeds the configured timeout
*/
private boolean checkProcessTimeOut(ProcessInstance processInstance) {
if(processInstance.getTimeout() == 0 ){
return false;
}
Date now = new Date();
long runningTime = DateUtils.diffMin(now, processInstance.getStartTime());
return runningTime > processInstance.getTimeout();
}
/**
* whether can submit task to queue
* @return boolean
*/
private boolean canSubmitTaskToQueue() {
return OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory());
}
/**
* close the on going tasks
*/
private void killTheOtherTasks() {
logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(),
activeTaskNode.size());
for (Map.Entry<MasterBaseTaskExecThread, Future<Boolean>> entry : activeTaskNode.entrySet()) {
MasterBaseTaskExecThread taskExecThread = entry.getKey();
Future<Boolean> future = entry.getValue();
TaskInstance taskInstance = taskExecThread.getTaskInstance();
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
if(taskInstance != null && taskInstance.getState().typeIsFinished()){
continue;
}
if (!future.isDone()) {
// record kill info
logger.info("kill process instance, id: {}, task: {}", processInstance.getId(), taskExecThread.getTaskInstance().getId());
// kill node
taskExecThread.kill();
}
}
}
/**
* whether the retry interval is timed out
* @param taskInstance task instance
* @return Boolean
*/
private boolean retryTaskIntervalOverTime(TaskInstance taskInstance){
if(taskInstance.getState() != ExecutionStatus.FAILURE){
return true;
}
if(taskInstance.getId() == 0 ||
taskInstance.getMaxRetryTimes() ==0 ||
taskInstance.getRetryInterval() == 0 ){
return true;
}
Date now = new Date();
long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime());
// return true only if the elapsed time since failure exceeds the retry interval
return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval;
}
/**
* handling the list of tasks to be submitted
*/
private void submitStandByTask(){
for(Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) {
TaskInstance task = entry.getValue();
DependResult dependResult = getDependResultForTask(task);
if(DependResult.SUCCESS == dependResult){
if(retryTaskIntervalOverTime(task)){
submitTaskExec(task);
removeTaskFromStandbyList(task);
}
}else if(DependResult.FAILED == dependResult){
// if the dependency fails, the current node is not submitted and the state changes to failure.
dependFailedTask.put(entry.getKey(), task);
removeTaskFromStandbyList(task);
logger.info("task {},id:{} depend result : {}",task.getName(), task.getId(), dependResult);
}
}
}
/**
* get recovery task instance
* @param taskId task id
* @return recovery task instance
*/
private TaskInstance getRecoveryTaskInstance(String taskId){
if(!StringUtils.isNotEmpty(taskId)){
return null;
}
try {
Integer intId = Integer.valueOf(taskId);
TaskInstance task = processService.findTaskInstanceById(intId);
if(task == null){
logger.error("start node id cannot be found: {}", taskId);
}else {
return task;
}
}catch (Exception e){
logger.error("get recovery task instance failed ",e);
}
return null;
}
/**
* get start task instance list
* @param cmdParam command param
* @return task instance list
*/
private List<TaskInstance> getStartTaskInstanceList(String cmdParam){
List<TaskInstance> instanceList = new ArrayList<>();
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
if(paramMap != null && paramMap.containsKey(CMDPARAM_RECOVERY_START_NODE_STRING)){
String[] idList = paramMap.get(CMDPARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA);
for(String nodeId : idList){
TaskInstance task = getRecoveryTaskInstance(nodeId);
if(task != null){
instanceList.add(task);
}
}
}
return instanceList;
}
/**
* parse "StartNodeNameList" from cmd param
* @param cmdParam command param
* @return start node name list
*/
private List<String> parseStartNodeName(String cmdParam){
List<String> startNodeNameList = new ArrayList<>();
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
if(paramMap == null){
return startNodeNameList;
}
if(paramMap.containsKey(CMDPARAM_START_NODE_NAMES)){
startNodeNameList = Arrays.asList(paramMap.get(CMDPARAM_START_NODE_NAMES).split(Constants.COMMA));
}
return startNodeNameList;
}
/**
* generate the recovery node name list from the recovered task instances
* @return recovery node name list
*/
private List<String> getRecoveryNodeNameList(){
List<String> recoveryNodeNameList = new ArrayList<>();
if(CollectionUtils.isNotEmpty(recoverNodeIdList)) {
for (TaskInstance task : recoverNodeIdList) {
recoveryNodeNameList.add(task.getName());
}
}
return recoveryNodeNameList;
}
/**
* generate flow dag
* @param processDefinitionJson process definition json
* @param startNodeNameList start node name list
* @param recoveryNodeNameList recovery node name list
* @param depNodeType depend node type
* @return ProcessDag process dag
* @throws Exception exception
*/
public ProcessDag generateFlowDag(String processDefinitionJson,
List<String> startNodeNameList,
List<String> recoveryNodeNameList,
TaskDependType depNodeType)throws Exception{
return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType);
}
}
|
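The guard most relevant to this issue lives in `isTaskDepsComplete()` above: a failed predecessor only fails the dependency when no conditions logic can still route around it. A hedged sketch (hypothetical helper, not in the repository) isolating that predicate:

```java
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;

final class ConditionsAwareGuard {
    /**
     * mirrors the check in MasterExecThread.isTaskDepsComplete(): a failed
     * dependency is only fatal when the node is not itself a conditions task
     * and has no conditions task after it in the DAG
     */
    static boolean failsDependency(ExecutionStatus depTaskState,
                                   boolean isConditionsTask,
                                   boolean hasConditionsAfterNode) {
        return depTaskState.typeIsFailure()
                && !hasConditionsAfterNode
                && !isConditionsTask;
    }
}
```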
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,464 | [BUG] dependent task failed when conditions task exists | ![image](https://user-images.githubusercontent.com/29528966/79708968-8ba91800-82f3-11ea-8bfb-ee1757bb2d80.png)
description:
1. Workflow A has a conditions task.
2. Workflow B has a dependent task that depends on workflow A.
3. Workflow B always fails.
| https://github.com/apache/dolphinscheduler/issues/2464 | https://github.com/apache/dolphinscheduler/pull/2768 | 7a05c007b45ea742e3ed1a7558c1a641515c140c | 184d64e85291b1e8fb82494bf5a8e2ea755a3dc1 | "2020-04-20T02:48:35Z" | java | "2020-05-21T02:34:28Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/DependentExecute.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.DependentRelation;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.DependentItem;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DependentUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* dependent item execute
*/
public class DependentExecute {
/**
* process service
*/
private final ProcessService processService = SpringApplicationContext.getBean(ProcessService.class);
/**
* depend item list
*/
private List<DependentItem> dependItemList;
/**
* dependent relation
*/
private DependentRelation relation;
/**
* depend result
*/
private DependResult modelDependResult = DependResult.WAITING;
/**
* depend result map
*/
private Map<String, DependResult> dependResultMap = new HashMap<>();
/**
* logger
*/
private Logger logger = LoggerFactory.getLogger(DependentExecute.class);
/**
* constructor
* @param itemList item list
* @param relation relation
*/
public DependentExecute(List<DependentItem> itemList, DependentRelation relation){
this.dependItemList = itemList;
this.relation = relation;
}
/**
* get the dependent result for one dependent item
* @param dependentItem dependent item
* @param currentTime current time
* @return DependResult
*/
private DependResult getDependentResultForItem(DependentItem dependentItem, Date currentTime){
List<DateInterval> dateIntervals = DependentUtils.getDateIntervalList(currentTime, dependentItem.getDateValue());
return calculateResultForTasks(dependentItem, dateIntervals );
}
/**
* calculate dependent result for one dependent item.
* @param dependentItem dependent item
* @param dateIntervals date intervals
* @return DependResult
*/
private DependResult calculateResultForTasks(DependentItem dependentItem,
List<DateInterval> dateIntervals) {
DependResult result = DependResult.FAILED;
for(DateInterval dateInterval : dateIntervals){
ProcessInstance processInstance = findLastProcessInterval(dependentItem.getDefinitionId(),
dateInterval);
if(processInstance == null){
logger.error("cannot find the right process instance: definition id:{}, start:{}, end:{}",
dependentItem.getDefinitionId(), dateInterval.getStartTime(), dateInterval.getEndTime() );
return DependResult.FAILED;
}
// need to check the whole workflow, so get all tasks and check their states
if(dependentItem.getDepTasks().equals(Constants.DEPENDENT_ALL)){
result = dependResultByProcessInstance(processInstance);
}else{
result = getDependTaskResult(dependentItem.getDepTasks(),processInstance);
}
if(result != DependResult.SUCCESS){
break;
}
}
return result;
}
/**
* depend type = depend_all
* skip the conditions tasks and judge all the remaining tasks
* @return DependResult
*/
private DependResult dependResultByProcessInstance(ProcessInstance processInstance){
DependResult result = DependResult.FAILED;
List<TaskNode> taskNodes =
processService.getTaskNodeListByDefinitionId(processInstance.getProcessDefinitionId());
if(CollectionUtils.isEmpty(taskNodes)) {
return result;
}
for(TaskNode taskNode:taskNodes){
if(taskNode.isConditionsTask()
|| DagHelper.haveConditionsAfterNode(taskNode.getName(), taskNodes)){
continue;
}
DependResult tmpResult = getDependTaskResult(taskNode.getName(),processInstance);
if(DependResult.SUCCESS != tmpResult){
return tmpResult;
}
}
return DependResult.SUCCESS;
}
/**
* get depend task result
* @param taskName task name
* @param processInstance process instance
* @return DependResult
*/
private DependResult getDependTaskResult(String taskName, ProcessInstance processInstance) {
DependResult result;
TaskInstance taskInstance = null;
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance task : taskInstanceList){
if(task.getName().equals(taskName)){
taskInstance = task;
break;
}
}
if(taskInstance == null){
// cannot find task in the process instance
// maybe because process instance is running or failed.
result = getDependResultByProcessStateWhenTaskNull(processInstance.getState());
}else{
result = getDependResultByState(taskInstance.getState());
}
return result;
}
/**
* find the last process instance that:
* 1. was run manually and finished within the interval
* 2. was run by the scheduler with a schedule time within the interval
* @param definitionId definition id
* @param dateInterval date interval
* @return ProcessInstance
*/
private ProcessInstance findLastProcessInterval(int definitionId, DateInterval dateInterval) {
ProcessInstance runningProcess = processService.findLastRunningProcess(definitionId, dateInterval.getStartTime(), dateInterval.getEndTime());
if(runningProcess != null){
return runningProcess;
}
ProcessInstance lastSchedulerProcess = processService.findLastSchedulerProcessInterval(
definitionId, dateInterval
);
ProcessInstance lastManualProcess = processService.findLastManualProcessInterval(
definitionId, dateInterval
);
if(lastManualProcess ==null){
return lastSchedulerProcess;
}
if(lastSchedulerProcess == null){
return lastManualProcess;
}
return (lastManualProcess.getEndTime().after(lastSchedulerProcess.getEndTime()))?
lastManualProcess : lastSchedulerProcess;
}
/**
* get dependent result by task/process instance state
* @param state state
* @return DependResult
*/
private DependResult getDependResultByState(ExecutionStatus state) {
if(state.typeIsRunning()
|| state == ExecutionStatus.SUBMITTED_SUCCESS
|| state == ExecutionStatus.WAITTING_THREAD){
return DependResult.WAITING;
}else if(state.typeIsSuccess()){
return DependResult.SUCCESS;
}else{
return DependResult.FAILED;
}
}
/**
* get dependent result by task instance state when task instance is null
* @param state state
* @return DependResult
*/
private DependResult getDependResultByProcessStateWhenTaskNull(ExecutionStatus state) {
if(state.typeIsRunning()
|| state == ExecutionStatus.SUBMITTED_SUCCESS
|| state == ExecutionStatus.WAITTING_THREAD){
return DependResult.WAITING;
}else{
return DependResult.FAILED;
}
}
/**
* check whether this dependent model has finished
* @param currentTime current time
* @return boolean
*/
public boolean finish(Date currentTime){
if(modelDependResult == DependResult.WAITING){
modelDependResult = getModelDependResult(currentTime);
return false;
}
return true;
}
/**
* get model depend result
* @param currentTime current time
* @return DependResult
*/
public DependResult getModelDependResult(Date currentTime){
List<DependResult> dependResultList = new ArrayList<>();
for(DependentItem dependentItem : dependItemList){
DependResult dependResult = getDependResultForItem(dependentItem, currentTime);
if(dependResult != DependResult.WAITING){
dependResultMap.put(dependentItem.getKey(), dependResult);
}
dependResultList.add(dependResult);
}
modelDependResult = DependentUtils.getDependResultForRelation(
this.relation, dependResultList
);
return modelDependResult;
}
/**
* get dependent item result
* @param item item
* @param currentTime current time
* @return DependResult
*/
private DependResult getDependResultForItem(DependentItem item, Date currentTime){
String key = item.getKey();
if(dependResultMap.containsKey(key)){
return dependResultMap.get(key);
}
return getDependentResultForItem(item, currentTime);
}
public Map<String, DependResult> getDependResultMap(){
return dependResultMap;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,464 | [BUG] dependent task failed when conditions task exists | ![image](https://user-images.githubusercontent.com/29528966/79708968-8ba91800-82f3-11ea-8bfb-ee1757bb2d80.png)
description:
1. Workflow A has a conditions task.
2. Workflow B has a dependent task that depends on workflow A.
3. Workflow B always fails.
| https://github.com/apache/dolphinscheduler/issues/2464 | https://github.com/apache/dolphinscheduler/pull/2768 | 7a05c007b45ea742e3ed1a7558c1a641515c140c | 184d64e85291b1e8fb82494bf5a8e2ea755a3dc1 | "2020-04-20T02:48:35Z" | java | "2020-05-21T02:34:28Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/DependentTaskTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.utils.dependent.DependentDateUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.runner.DependentTaskExecThread;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@RunWith(MockitoJUnitRunner.Silent.class)
public class DependentTaskTest {
private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class);
private ProcessService processService;
private ApplicationContext applicationContext;
private MasterConfig config;
@Before
public void before() throws Exception{
config = new MasterConfig();
config.setMasterTaskCommitRetryTimes(3);
config.setMasterTaskCommitInterval(1000);
processService = Mockito.mock(ProcessService.class);
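// stub the queries the dependent item will make: today's interval of
// definition 4 and the task instances of its last process instance (id 11)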
DateInterval dateInterval = DependentDateUtils.getTodayInterval(new Date()).get(0);
Mockito.when(processService
.findLastRunningProcess(4, dateInterval.getStartTime(),
dateInterval.getEndTime()))
.thenReturn(findLastProcessInterval());
Mockito.when(processService
.getTaskNodeListByDefinitionId(4))
.thenReturn(getTaskNodes());
Mockito.when(processService
.findValidTaskListByProcessId(11))
.thenReturn(getTaskInstances());
Mockito.when(processService
.findTaskInstanceById(252612))
.thenReturn(getTaskInstance());
Mockito.when(processService.findProcessInstanceById(10111))
.thenReturn(getProcessInstance());
Mockito.when(processService.findProcessDefineById(0))
.thenReturn(getProcessDefinition());
Mockito.when(processService.saveTaskInstance(getTaskInstance()))
.thenReturn(true);
applicationContext = Mockito.mock(ApplicationContext.class);
SpringApplicationContext springApplicationContext = new SpringApplicationContext();
springApplicationContext.setApplicationContext(applicationContext);
Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService);
Mockito.when(applicationContext.getBean(MasterConfig.class)).thenReturn(config);
}
@Test
public void test() throws Exception{
TaskInstance taskInstance = getTaskInstance();
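// depend on ALL tasks of definition 4 ("C"), cycle "day"/"today", combined with AND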
String dependString = "{\"dependTaskList\":[{\"dependItemList\":[{\"dateValue\":\"today\",\"depTasks\":\"ALL\",\"projectId\":1,\"definitionList\":[{\"label\":\"C\",\"value\":4},{\"label\":\"B\",\"value\":3},{\"label\":\"A\",\"value\":2}],\"cycle\":\"day\",\"definitionId\":4}],\"relation\":\"AND\"}],\"relation\":\"AND\"}";
taskInstance.setDependency(dependString);
Mockito.when(processService.submitTask(taskInstance))
.thenReturn(taskInstance);
DependentTaskExecThread dependentTask =
new DependentTaskExecThread(taskInstance);
dependentTask.call();
Assert.assertEquals(ExecutionStatus.SUCCESS, dependentTask.getTaskInstance().getState());
}
private ProcessInstance findLastProcessInterval(){
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(11);
processInstance.setProcessDefinitionId(4);
processInstance.setState(ExecutionStatus.SUCCESS);
return processInstance;
}
private ProcessDefinition getProcessDefinition(){
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(0);
return processDefinition;
}
private ProcessInstance getProcessInstance(){
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(10111);
processInstance.setProcessDefinitionId(0);
processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
return processInstance;
}
private List<TaskNode> getTaskNodes(){
List<TaskNode> list = new ArrayList<>();
TaskNode taskNode = new TaskNode();
taskNode.setName("C");
taskNode.setType("SQL");
list.add(taskNode);
return list;
}
private List<TaskInstance> getTaskInstances(){
List<TaskInstance> list = new ArrayList<>();
TaskInstance taskInstance = new TaskInstance();
taskInstance.setName("C");
taskInstance.setState(ExecutionStatus.SUCCESS);
taskInstance.setDependency("1231");
list.add(taskInstance);
return list;
}
private TaskInstance getTaskInstance(){
TaskInstance taskInstance = new TaskInstance();
taskInstance.setTaskType("DEPENDENT");
taskInstance.setId(252612);
taskInstance.setName("C");
taskInstance.setProcessInstanceId(10111);
return taskInstance;
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,464 | [BUG] dependent task failed when conditions task exists | ![image](https://user-images.githubusercontent.com/29528966/79708968-8ba91800-82f3-11ea-8bfb-ee1757bb2d80.png)
description:
1. Workflow A has a conditions task.
2. Workflow B has a dependent task that depends on workflow A.
3. Workflow B always fails.
| https://github.com/apache/dolphinscheduler/issues/2464 | https://github.com/apache/dolphinscheduler/pull/2768 | 7a05c007b45ea742e3ed1a7558c1a641515c140c | 184d64e85291b1e8fb82494bf5a8e2ea755a3dc1 | "2020-04-20T02:48:35Z" | java | "2020-05-21T02:34:28Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cronutils.model.Cron;
import org.apache.commons.lang.ArrayUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.io.File;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toSet;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* process relative dao that some mappers in this.
*/
@Component
public class ProcessService {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private ProcessInstanceMapMapper processInstanceMapMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectMapper projectMapper;
/**
* handle Command (construct ProcessInstance from Command) , wrapped in transaction
* @param logger logger
* @param host host
* @param validThreadNum validThreadNum
* @param command found command
* @return process instance
*/
@Transactional(rollbackFor = Exception.class)
public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) {
ProcessInstance processInstance = constructProcessInstance(command, host);
//cannot construct process instance, return null;
if(processInstance == null){
logger.error("scan command, command parameter is error: {}", command);
moveToErrorCommand(command, "process instance is null");
return null;
}
if(!checkThreadNum(command, validThreadNum)){
logger.info("there is not enough thread for this command: {}", command);
return setWaitingThreadProcess(command, processInstance);
}
if (processInstance.getCommandType().equals(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS)){
delCommandByid(command.getId());
return null;
}
processInstance.setCommandType(command.getCommandType());
processInstance.addHistoryCmd(command.getCommandType());
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
delCommandByid(command.getId());
return processInstance;
}
/**
* save error command, and delete original command
* @param command command
* @param message message
*/
@Transactional(rollbackFor = Exception.class)
public void moveToErrorCommand(Command command, String message) {
ErrorCommand errorCommand = new ErrorCommand(command, message);
this.errorCommandMapper.insert(errorCommand);
delCommandByid(command.getId());
}
/**
* set process waiting thread
* @param command command
* @param processInstance processInstance
* @return process instance
*/
private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) {
processInstance.setState(ExecutionStatus.WAITTING_THREAD);
if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){
processInstance.addHistoryCmd(command.getCommandType());
}
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
createRecoveryWaitingThreadCommand(command, processInstance);
return null;
}
/**
* check thread num
* @param command command
* @param validThreadNum validThreadNum
* @return if thread is enough
*/
private boolean checkThreadNum(Command command, int validThreadNum) {
int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
return validThreadNum >= commandThreadCount;
}
/**
* insert one command
* @param command command
* @return create result
*/
public int createCommand(Command command) {
int result = 0;
if (command != null){
result = commandMapper.insert(command);
}
return result;
}
/**
* find one command from queue list
* @return command
*/
public Command findOneCommand(){
return commandMapper.getOneToRun();
}
/**
* check the input command exists in queue list
* @param command command
* @return create command result
*/
public Boolean verifyIsNeedCreateCommand(Command command){
Boolean isNeedCreate = true;
Map<CommandType,Integer> cmdTypeMap = new HashMap<CommandType,Integer>();
cmdTypeMap.put(CommandType.REPEAT_RUNNING,1);
cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,1);
cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS,1);
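// for these command types, at most one command per process instance should wait in the queue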
CommandType commandType = command.getCommandType();
if(cmdTypeMap.containsKey(commandType)){
JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam());
JSONObject tempObj;
int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING);
List<Command> commands = commandMapper.selectList(null);
// for all commands
for (Command tmpCommand:commands){
if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){
tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam());
if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){
isNeedCreate = false;
break;
}
}
}
}
return isNeedCreate;
}
/**
* find process instance detail by id
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceDetailById(int processId){
return processInstanceMapper.queryDetailById(processId);
}
/**
* get task node list by definitionId
* @param defineId process definition id
* @return task node list of the definition, or null if the definition or its data is missing
*/
public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId){
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
return null;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
return null;
}
return processData.getTasks();
}
/**
* find process instance by id
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceById(int processId){
return processInstanceMapper.selectById(processId);
}
/**
* find process define by id.
* @param processDefinitionId processDefinitionId
* @return process definition
*/
public ProcessDefinition findProcessDefineById(int processDefinitionId) {
return processDefineMapper.selectById(processDefinitionId);
}
/**
* delete work process instance by id
* @param processInstanceId processInstanceId
* @return delete process instance result
*/
public int deleteWorkProcessInstanceById(int processInstanceId){
return processInstanceMapper.deleteById(processInstanceId);
}
/**
* delete all sub process by parent instance id
* @param processInstanceId processInstanceId
* @return delete all sub process instance result
*/
public int deleteAllSubWorkProcessByParentId(int processInstanceId){
List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId);
for(Integer subId : subProcessIdList){
deleteAllSubWorkProcessByParentId(subId);
deleteWorkProcessMapByParentId(subId);
removeTaskLogFile(subId);
deleteWorkProcessInstanceById(subId);
}
return 1;
}
/**
* remove task log file
* @param processInstanceId processInstanceId
*/
public void removeTaskLogFile(Integer processInstanceId){
LogClientService logClient = new LogClientService();
List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
if (CollectionUtils.isEmpty(taskInstanceList)){
return;
}
for (TaskInstance taskInstance : taskInstanceList){
String taskLogPath = taskInstance.getLogPath();
if (StringUtils.isEmpty(taskInstance.getHost())){
continue;
}
String ip = Host.of(taskInstance.getHost()).getIp();
int port = Constants.RPC_PORT;
// remove task log from loggerserver
logClient.removeTaskLog(ip,port,taskLogPath);
}
}
/**
* calculate sub process number in the process define.
* @param processDefinitionId processDefinitionId
* @return process thread num count
*/
private Integer workProcessThreadNumCount(Integer processDefinitionId){
List<Integer> ids = new ArrayList<>();
recurseFindSubProcessId(processDefinitionId, ids);
return ids.size()+1;
}
/**
* recursive query sub process definition id by parent id.
* @param parentId parentId
* @param ids ids
*/
public void recurseFindSubProcessId(int parentId, List<Integer> ids){
ProcessDefinition processDefinition = processDefineMapper.selectById(parentId);
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
if (taskNodeList != null && taskNodeList.size() > 0){
for (TaskNode taskNode : taskNodeList){
String parameter = taskNode.getParams();
JSONObject parameterJson = JSONObject.parseObject(parameter);
if (parameterJson.getInteger(CMDPARAM_SUB_PROCESS_DEFINE_ID) != null){
SubProcessParameters subProcessParam = JSON.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids);
}
}
}
}
/**
* create recovery waiting thread command when thread pool is not enough for the process instance.
* a sub work process instance does not need to create a recovery command.
* create the recovery waiting thread command and delete the origin command at the same time.
* if the recovery command already exists, only update the update_time field
* @param originCommand originCommand
* @param processInstance processInstance
*/
public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) {
// a sub process does not need to create a waiting thread command
if(processInstance.getIsSubProcess() == Flag.YES){
if(originCommand != null){
commandMapper.deleteById(originCommand.getId());
}
return;
}
Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD, String.valueOf(processInstance.getId()));
// process instance quit by "waiting thread" state
if(originCommand == null){
Command command = new Command(
CommandType.RECOVER_WAITTING_THREAD,
processInstance.getTaskDependType(),
processInstance.getFailureStrategy(),
processInstance.getExecutorId(),
processInstance.getProcessDefinitionId(),
JSONUtils.toJson(cmdParam),
processInstance.getWarningType(),
processInstance.getWarningGroupId(),
processInstance.getScheduleTime(),
processInstance.getProcessInstancePriority()
);
saveCommand(command);
return ;
}
// update the command time if the current command is a recover-from-waiting-thread command
if(originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD){
originCommand.setUpdateTime(new Date());
saveCommand(originCommand);
}else{
// delete old command and create new waiting thread command
commandMapper.deleteById(originCommand.getId());
originCommand.setId(0);
originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
originCommand.setUpdateTime(new Date());
originCommand.setCommandParam(JSONUtils.toJson(cmdParam));
originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority());
saveCommand(originCommand);
}
}
/**
* get schedule time from command
* @param command command
* @param cmdParam cmdParam map
* @return date
*/
private Date getScheduleTime(Command command, Map<String, String> cmdParam){
Date scheduleTime = command.getScheduleTime();
if(scheduleTime == null){
if(cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)){
scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
}
}
return scheduleTime;
}
/**
* generate a new work process instance from command.
* @param processDefinition processDefinition
* @param command command
* @param cmdParam cmdParam map
* @return process instance
*/
private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition,
Command command,
Map<String, String> cmdParam){
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
processInstance.setRecovery(Flag.NO);
processInstance.setStartTime(new Date());
processInstance.setRunTimes(1);
processInstance.setMaxTryTimes(0);
processInstance.setProcessDefinitionId(command.getProcessDefinitionId());
processInstance.setCommandParam(command.getCommandParam());
processInstance.setCommandType(command.getCommandType());
processInstance.setIsSubProcess(Flag.NO);
processInstance.setTaskDependType(command.getTaskDependType());
processInstance.setFailureStrategy(command.getFailureStrategy());
processInstance.setExecutorId(command.getExecutorId());
WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType();
processInstance.setWarningType(warningType);
Integer warningGroupId = command.getWarningGroupId() == null ? 0 : command.getWarningGroupId();
processInstance.setWarningGroupId(warningGroupId);
// schedule time
Date scheduleTime = getScheduleTime(command, cmdParam);
if(scheduleTime != null){
processInstance.setScheduleTime(scheduleTime);
}
processInstance.setCommandStartTime(command.getStartTime());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setConnects(processDefinition.getConnects());
// curing global params
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
//copy process define json to process instance
processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson());
// set process instance priority
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup();
processInstance.setWorkerGroup(workerGroup);
processInstance.setTimeout(processDefinition.getTimeout());
processInstance.setTenantId(processDefinition.getTenantId());
return processInstance;
}
/**
* get process tenant
* if there is a tenant id in the definition, use the tenant of the definition;
* if there is no tenant id in the definition, or the tenant does not exist,
* use the definition creator's tenant.
* @param tenantId tenantId
* @param userId userId
* @return tenant
*/
public Tenant getTenantForProcess(int tenantId, int userId){
Tenant tenant = null;
if(tenantId >= 0){
tenant = tenantMapper.queryById(tenantId);
}
if (userId == 0){
return null;
}
if(tenant == null){
User user = userMapper.selectById(userId);
tenant = tenantMapper.queryById(user.getTenantId());
}
return tenant;
}
/**
* check command parameters is valid
* @param command command
* @param cmdParam cmdParam map
* @return whether command param is valid
*/
private Boolean checkCmdParam(Command command, Map<String, String> cmdParam){
if(command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType()== TaskDependType.TASK_PRE){
if(cmdParam == null
|| !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES)
|| cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()){
logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType());
return false;
}
}
return true;
}
/**
* construct process instance according to one command.
* @param command command
* @param host host
* @return process instance
*/
private ProcessInstance constructProcessInstance(Command command, String host){
ProcessInstance processInstance = null;
CommandType commandType = command.getCommandType();
Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam());
ProcessDefinition processDefinition = null;
if(command.getProcessDefinitionId() != 0){
processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId());
if(processDefinition == null){
logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId());
return null;
}
}
if(cmdParam != null ){
Integer processInstanceId = 0;
// recover from failure or pause tasks
if(cmdParam.containsKey(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING)) {
String processId = cmdParam.get(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING);
processInstanceId = Integer.parseInt(processId);
if (processInstanceId == 0) {
logger.error("command parameter is error, [ ProcessInstanceId ] is 0");
return null;
}
}else if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){
// sub process map
String pId = cmdParam.get(Constants.CMDPARAM_SUB_PROCESS);
processInstanceId = Integer.parseInt(pId);
}else if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD)){
// waiting thread command
String pId = cmdParam.get(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD);
processInstanceId = Integer.parseInt(pId);
}
if(processInstanceId ==0){
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}else{
processInstance = this.findProcessInstanceDetailById(processInstanceId);
}
processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId());
processInstance.setProcessDefinition(processDefinition);
//reset command parameter
if(processInstance.getCommandParam() != null){
Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam());
for(Map.Entry<String, String> entry: processCmdParam.entrySet()) {
if(!cmdParam.containsKey(entry.getKey())){
cmdParam.put(entry.getKey(), entry.getValue());
}
}
}
// reset command parameter if sub process
if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){
processInstance.setCommandParam(command.getCommandParam());
}
}else{
// generate one new process instance
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}
if(!checkCmdParam(command, cmdParam)){
logger.error("command parameter check failed!");
return null;
}
if(command.getScheduleTime() != null){
processInstance.setScheduleTime(command.getScheduleTime());
}
processInstance.setHost(host);
ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXEUTION;
int runTime = processInstance.getRunTimes();
switch (commandType){
case START_PROCESS:
break;
case START_FAILURE_TASK_PROCESS:
// find failed tasks and init these tasks
List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE);
List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE);
List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL);
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
failedList.addAll(killedList);
failedList.addAll(toleranceList);
for(Integer taskId : failedList){
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING,
String.join(Constants.COMMA, convertIntListToString(failedList)));
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
processInstance.setRunTimes(runTime +1 );
break;
case START_CURRENT_TASK_PROCESS:
break;
case RECOVER_WAITTING_THREAD:
break;
case RECOVER_SUSPENDED_PROCESS:
// find pause tasks and init task's state
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE);
List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(),
ExecutionStatus.KILL);
suspendedNodeList.addAll(stopNodeList);
for(Integer taskId : suspendedNodeList){
// initialize the pause state
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList)));
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
processInstance.setRunTimes(runTime +1);
break;
case RECOVER_TOLERANCE_FAULT_PROCESS:
// recover tolerance fault process
processInstance.setRecovery(Flag.YES);
runStatus = processInstance.getState();
break;
case COMPLEMENT_DATA:
// delete all the valid tasks when complement data
List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
taskInstance.setFlag(Flag.NO);
this.updateTaskInstance(taskInstance);
}
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case REPEAT_RUNNING:
// delete the recover task names from command parameter
if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
}
// delete all the valid tasks when repeat running
List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance taskInstance : validTaskList){
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
}
processInstance.setStartTime(new Date());
processInstance.setEndTime(null);
processInstance.setRunTimes(runTime +1);
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case SCHEDULER:
break;
default:
break;
}
processInstance.setState(runStatus);
return processInstance;
}
/**
* return complement data if the process start with complement data
* @param processInstance processInstance
* @param command command
* @return command type
*/
private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command){
if(CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()){
return CommandType.COMPLEMENT_DATA;
}else{
return command.getCommandType();
}
}
/**
* initialize complement data parameters
* @param processDefinition processDefinition
* @param processInstance processInstance
* @param cmdParam cmdParam
*/
private void initComplementDataParam(ProcessDefinition processDefinition,
ProcessInstance processInstance,
Map<String, String> cmdParam) {
if(!processInstance.isComplementData()){
return;
}
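// the complement start date comes from the command params in yyyy-MM-dd HH:mm:ss
// form (per the YYYY_MM_DD_HH_MM_SS constant)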
Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE),
YYYY_MM_DD_HH_MM_SS);
if(Flag.NO == processInstance.getIsSubProcess()) {
processInstance.setScheduleTime(startComplementTime);
}
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
}
/**
* set sub work process parameters.
* handle sub work process instance, update relation table and command parameters
* set sub work process flag, extends parent work process command parameters
* @param subProcessInstance subProcessInstance
* @return process instance
*/
public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance){
String cmdParam = subProcessInstance.getCommandParam();
if(StringUtils.isEmpty(cmdParam)){
return subProcessInstance;
}
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
// write sub process id into cmd param.
if(paramMap.containsKey(CMDPARAM_SUB_PROCESS)
&& CMDPARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMDPARAM_SUB_PROCESS))){
paramMap.remove(CMDPARAM_SUB_PROCESS);
paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId()));
subProcessInstance.setCommandParam(JSONUtils.toJson(paramMap));
subProcessInstance.setIsSubProcess(Flag.YES);
this.saveProcessInstance(subProcessInstance);
}
// copy parent instance user-defined params to the sub process
String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID);
if(StringUtils.isNotEmpty(parentInstanceId)){
ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId));
if(parentInstance != null){
subProcessInstance.setGlobalParams(
joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams()));
this.saveProcessInstance(subProcessInstance);
}else{
logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam);
}
}
ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class);
if(processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0){
return subProcessInstance;
}
// update sub process id to process map table
processInstanceMap.setProcessInstanceId(subProcessInstance.getId());
this.updateWorkProcessInstanceMap(processInstanceMap);
return subProcessInstance;
}
/**
* join parent global params into the sub process.
* only keys not already present in the sub process global params are joined.
* @param parentGlobalParams parentGlobalParams
* @param subGlobalParams subGlobalParams
* @return global params join
*/
private String joinGlobalParams(String parentGlobalParams, String subGlobalParams){
List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class);
List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class);
Map<String,String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
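// on key conflicts the sub process value wins; only missing parent keys are appended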
for(Property parent : parentPropertyList){
if(!subMap.containsKey(parent.getProp())){
subPropertyList.add(parent);
}
}
return JSONUtils.toJson(subPropertyList);
}
/**
* initialize task instance
* @param taskInstance taskInstance
*/
private void initTaskInstance(TaskInstance taskInstance){
if(!taskInstance.isSubProcess()){
if(taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure()){
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
return;
}
}
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateTaskInstance(taskInstance);
}
/**
* submit task to db
* submit sub process to command
* @param taskInstance taskInstance
* @return task instance
*/
@Transactional(rollbackFor = Exception.class)
public TaskInstance submitTask(TaskInstance taskInstance){
ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
logger.info("start submit task : {}, instance id:{}, state: {}",
taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState());
//submit to db
TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance);
if(task == null){
logger.error("end submit task to db error, task name:{}, process id:{} state: {} ",
taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState());
return task;
}
if(!task.getState().typeIsFinished()){
createSubWorkProcessCommand(processInstance, task);
}
logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ",
taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState());
return task;
}
/**
* set work process instance map
* @param parentInstance parentInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask){
ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId());
if(processMap != null){
return processMap;
}else if(parentInstance.getCommandType() == CommandType.REPEAT_RUNNING
|| parentInstance.isComplementData()){
// update current task id to map
// repeat running does not generate new sub process instance
processMap = findPreviousTaskProcessMap(parentInstance, parentTask);
if(processMap!= null){
processMap.setParentTaskInstanceId(parentTask.getId());
updateWorkProcessInstanceMap(processMap);
return processMap;
}
}
// new task
processMap = new ProcessInstanceMap();
processMap.setParentProcessInstanceId(parentInstance.getId());
processMap.setParentTaskInstanceId(parentTask.getId());
createWorkProcessInstanceMap(processMap);
return processMap;
}
/**
* find previous task work process map.
* @param parentProcessInstance parentProcessInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance,
TaskInstance parentTask) {
Integer preTaskId = 0;
List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId());
for(TaskInstance task : preTaskList){
if(task.getName().equals(parentTask.getName())){
preTaskId = task.getId();
ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId);
if(map!=null){
return map;
}
}
}
logger.info("sub process instance is not found,parent task:{},parent instance:{}",
parentTask.getId(), parentProcessInstance.getId());
return null;
}
/**
* create sub work process command
* @param parentProcessInstance parentProcessInstance
* @param task task
*/
private void createSubWorkProcessCommand(ProcessInstance parentProcessInstance,
TaskInstance task){
if(!task.isSubProcess()){
return;
}
ProcessInstanceMap instanceMap = setProcessInstanceMap(parentProcessInstance, task);
TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class);
Map<String, String> subProcessParam = JSONUtils.toMap(taskNode.getParams());
Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID));
ProcessInstance childInstance = findSubProcessInstance(parentProcessInstance.getId(), task.getId());
CommandType fatherType = parentProcessInstance.getCommandType();
CommandType commandType = fatherType;
if(childInstance == null || commandType == CommandType.REPEAT_RUNNING){
String fatherHistoryCommand = parentProcessInstance.getHistoryCmd();
// the sub process must begin with schedule/complement data
// if the parent began with scheduler/complement data
if(fatherHistoryCommand.startsWith(CommandType.SCHEDULER.toString()) ||
fatherHistoryCommand.startsWith(CommandType.COMPLEMENT_DATA.toString())){
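// historyCmd is a comma separated list (see addHistoryCmd), e.g. "SCHEDULER,REPEAT_RUNNING";
// take the original trigger type from its head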
commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]);
}
}
if(childInstance != null){
childInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateProcessInstance(childInstance);
}
// set sub work process command
String processMapStr = JSONUtils.toJson(instanceMap);
Map<String, String> cmdParam = JSONUtils.toMap(processMapStr);
if(commandType == CommandType.COMPLEMENT_DATA ||
(childInstance != null && childInstance.isComplementData())){
Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam());
String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE);
String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime);
processMapStr = JSONUtils.toJson(cmdParam);
}
updateSubProcessDefinitionByParent(parentProcessInstance, childDefineId);
Command command = new Command();
command.setWarningType(parentProcessInstance.getWarningType());
command.setWarningGroupId(parentProcessInstance.getWarningGroupId());
command.setFailureStrategy(parentProcessInstance.getFailureStrategy());
command.setProcessDefinitionId(childDefineId);
command.setScheduleTime(parentProcessInstance.getScheduleTime());
command.setExecutorId(parentProcessInstance.getExecutorId());
command.setCommandParam(processMapStr);
command.setCommandType(commandType);
command.setProcessInstancePriority(parentProcessInstance.getProcessInstancePriority());
command.setWorkerGroup(parentProcessInstance.getWorkerGroup());
createCommand(command);
logger.info("sub process command created: {} ", command.toString());
}
/**
* update sub process definition
* @param parentProcessInstance parentProcessInstance
* @param childDefinitionId childDefinitionId
*/
private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) {
ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId());
ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId);
if(childDefinition != null && fatherDefinition != null){
childDefinition.setReceivers(fatherDefinition.getReceivers());
childDefinition.setReceiversCc(fatherDefinition.getReceiversCc());
processDefineMapper.updateById(childDefinition);
}
}
/**
* submit task instance to db
* @param taskInstance taskInstance
* @param processInstance processInstance
* @return task instance
*/
public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance){
ExecutionStatus processInstanceState = processInstance.getState();
if(taskInstance.getState().typeIsFailure()){
if(taskInstance.isSubProcess()){
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 );
}else {
if( processInstanceState != ExecutionStatus.READY_STOP
&& processInstanceState != ExecutionStatus.READY_PAUSE){
// failure task set invalid
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
// create a new task instance
if(taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE){
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 );
}
taskInstance.setEndTime(null);
taskInstance.setStartTime(new Date());
taskInstance.setFlag(Flag.YES);
taskInstance.setHost(null);
taskInstance.setId(0);
}
}
}
taskInstance.setExecutorId(processInstance.getExecutorId());
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority());
taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState));
taskInstance.setSubmitTime(new Date());
boolean saveResult = saveTaskInstance(taskInstance);
if(!saveResult){
return null;
}
return taskInstance;
}
/**
* ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskInstanceId}_${workerGroup}
* The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low.
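* e.g. a hypothetical medium-priority task might serialize to "2_10111_2_252612_default"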
* @param taskInstance taskInstance
* @return task zk queue str
*/
public String taskZkInfo(TaskInstance taskInstance) {
String taskWorkerGroup = getTaskWorkerGroup(taskInstance);
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if(processInstance == null){
logger.error("process instance is null. please check the task info, task id: " + taskInstance.getId());
return "";
}
StringBuilder sb = new StringBuilder(100);
sb.append(processInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE)
.append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE)
.append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE)
.append(taskInstance.getId()).append(Constants.UNDERLINE)
.append(taskInstance.getWorkerGroup());
return sb.toString();
}
/**
* get the submit task instance state by the work process state
* cannot modify the task state when it is running/killed/submitted successfully,
* or when this task instance already exists in the task queue.
* return pause if work process state is ready pause
* return stop if work process state is ready stop
* if all of above are not satisfied, return submit success
*
* @param taskInstance taskInstance
* @param processInstanceState processInstanceState
* @return process instance state
*/
public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState){
ExecutionStatus state = taskInstance.getState();
if(
// running or killed
// the task already exists in task queue
// return state
state == ExecutionStatus.RUNNING_EXEUTION
|| state == ExecutionStatus.KILL
|| checkTaskExistsInTaskQueue(taskInstance)
){
return state;
}
// return pause/stop if the process instance state is ready pause/stop
// or return submit success
if( processInstanceState == ExecutionStatus.READY_PAUSE){
state = ExecutionStatus.PAUSE;
}else if(processInstanceState == ExecutionStatus.READY_STOP
|| !checkProcessStrategy(taskInstance)) {
state = ExecutionStatus.KILL;
}else{
state = ExecutionStatus.SUBMITTED_SUCCESS;
}
return state;
}
/**
* check process instance strategy
* @param taskInstance taskInstance
* @return check strategy result
*/
private boolean checkProcessStrategy(TaskInstance taskInstance){
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
FailureStrategy failureStrategy = processInstance.getFailureStrategy();
if(failureStrategy == FailureStrategy.CONTINUE){
return true;
}
List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId());
for(TaskInstance task : taskInstances){
if(task.getState() == ExecutionStatus.FAILURE){
return false;
}
}
return true;
}
/**
* check the task instance existing in queue
* @param taskInstance taskInstance
* @return whether the task instance exists in the task queue
*/
public boolean checkTaskExistsInTaskQueue(TaskInstance taskInstance){
if(taskInstance.isSubProcess()){
return false;
}
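// note: the external (ZooKeeper) task queue appears to have been removed, so no
// task lingers outside the db and this method currently always returns false;
// the taskZkInfo lookup below looks like a leftover of the old queue check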
String taskZkInfo = taskZkInfo(taskInstance);
return false;
}
/**
* create a new process instance
* @param processInstance processInstance
*/
public void createProcessInstance(ProcessInstance processInstance){
if (processInstance != null){
processInstanceMapper.insert(processInstance);
}
}
/**
* insert or update work process instance to data base
* @param processInstance processInstance
*/
public void saveProcessInstance(ProcessInstance processInstance){
if (processInstance == null){
logger.error("save error, process instance is null!");
return ;
}
if(processInstance.getId() != 0){
processInstanceMapper.updateById(processInstance);
}else{
createProcessInstance(processInstance);
}
}
/**
* insert or update command
* @param command command
* @return save command result
*/
public int saveCommand(Command command){
if(command.getId() != 0){
return commandMapper.updateById(command);
}else{
return commandMapper.insert(command);
}
}
/**
* insert or update task instance
* @param taskInstance taskInstance
* @return save task instance result
*/
public boolean saveTaskInstance(TaskInstance taskInstance){
if(taskInstance.getId() != 0){
return updateTaskInstance(taskInstance);
}else{
return createTaskInstance(taskInstance);
}
}
/**
* insert task instance
* @param taskInstance taskInstance
* @return create task instance result
*/
public boolean createTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.insert(taskInstance);
return count > 0;
}
/**
* update task instance
* @param taskInstance taskInstance
* @return update task instance result
*/
public boolean updateTaskInstance(TaskInstance taskInstance){
int count = taskInstanceMapper.updateById(taskInstance);
return count > 0;
}
/**
* delete a command by id
* @param id id
*/
public void delCommandByid(int id) {
commandMapper.deleteById(id);
}
/**
* find task instance by id
* @param taskId task id
* @return task instance
*/
public TaskInstance findTaskInstanceById(Integer taskId){
return taskInstanceMapper.selectById(taskId);
}
/**
* package task instance,associate processInstance and processDefine
* @param taskInstId taskInstId
* @return task instance
*/
public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId){
// get task instance
TaskInstance taskInstance = findTaskInstanceById(taskInstId);
if(taskInstance == null){
return taskInstance;
}
// get process instance
ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
// get process define
ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId());
taskInstance.setProcessInstance(processInstance);
taskInstance.setProcessDefine(processDefine);
return taskInstance;
}
/**
* get id list by task state
* @param instanceId instanceId
* @param state state
* @return task instance states
*/
public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state){
return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal());
}
/**
* find valid task list by process definition id
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId){
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES);
}
/**
* find previous task list by work process id
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId){
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO);
}
/**
* update work process instance map
* @param processInstanceMap processInstanceMap
* @return update process instance result
*/
public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){
return processInstanceMapMapper.updateById(processInstanceMap);
}
/**
* create work process instance map
* @param processInstanceMap processInstanceMap
* @return create process instance result
*/
public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){
if(processInstanceMap != null){
return processInstanceMapMapper.insert(processInstanceMap);
}
return 0;
}
/**
* find work process map by parent process id and parent task id.
* @param parentWorkProcessId parentWorkProcessId
* @param parentTaskId parentTaskId
* @return process instance map
*/
public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId){
return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId);
}
/**
* delete work process map by parent process id
* @param parentWorkProcessId parentWorkProcessId
* @return delete process map result
*/
public int deleteWorkProcessMapByParentId(int parentWorkProcessId){
return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId);
}
/**
* find sub process instance
* @param parentProcessId parentProcessId
* @param parentTaskId parentTaskId
* @return process instance
*/
public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId){
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId);
if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId());
return processInstance;
}
/**
* find parent process instance
* @param subProcessId subProcessId
* @return process instance
*/
public ProcessInstance findParentProcessInstance(Integer subProcessId) {
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId);
if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId());
return processInstance;
}
/**
* change task state
* @param state state
* @param startTime startTime
* @param host host
* @param executePath executePath
* @param logPath logPath
* @param taskInstId taskInstId
*/
public void changeTaskState(ExecutionStatus state, Date startTime, String host,
String executePath,
String logPath,
int taskInstId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId);
taskInstance.setState(state);
taskInstance.setStartTime(startTime);
taskInstance.setHost(host);
taskInstance.setExecutePath(executePath);
taskInstance.setLogPath(logPath);
saveTaskInstance(taskInstance);
}
/**
* update process instance
* @param processInstance processInstance
* @return update process instance result
*/
public int updateProcessInstance(ProcessInstance processInstance){
return processInstanceMapper.updateById(processInstance);
}
/**
* update the process instance
* @param processInstanceId processInstanceId
* @param processJson processJson
* @param globalParams globalParams
* @param scheduleTime scheduleTime
* @param flag flag
* @param locations locations
* @param connects connects
* @return update process instance result
*/
public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag,
String locations, String connects){
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if(processInstance!= null){
processInstance.setProcessInstanceJson(processJson);
processInstance.setGlobalParams(globalParams);
processInstance.setScheduleTime(scheduleTime);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
return processInstanceMapper.updateById(processInstance);
}
return 0;
}
/**
* change task state
* @param state state
* @param endTime endTime
* @param taskInstId taskInstId
*/
public void changeTaskState(ExecutionStatus state,
Date endTime,
int processId,
String appIds,
int taskInstId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId);
taskInstance.setPid(processId);
taskInstance.setAppLink(appIds);
taskInstance.setState(state);
taskInstance.setEndTime(endTime);
saveTaskInstance(taskInstance);
}
/**
* convert integer list to string list
* @param intList intList
* @return string list
*/
public List<String> convertIntListToString(List<Integer> intList){
if(intList == null){
return new ArrayList<>();
}
List<String> result = new ArrayList<String>(intList.size());
for(Integer intVar : intList){
result.add(String.valueOf(intVar));
}
return result;
}
/**
* update pid and app links field by task instance id
* @param taskInstId taskInstId
* @param pid pid
* @param appLinks appLinks
*/
public void updatePidByTaskInstId(int taskInstId, int pid,String appLinks) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId);
taskInstance.setPid(pid);
taskInstance.setAppLink(appLinks);
saveTaskInstance(taskInstance);
}
/**
* query schedule by id
* @param id id
* @return schedule
*/
public Schedule querySchedule(int id) {
return scheduleMapper.selectById(id);
}
/**
* query Schedule by processDefinitionId
* @param processDefinitionId processDefinitionId
* @see Schedule
*/
public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) {
return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
}
/**
* query need failover process instance
* @param host host
* @return process instance list
*/
public List<ProcessInstance> queryNeedFailoverProcessInstances(String host){
return processInstanceMapper.queryByHostAndStatus(host, stateArray);
}
/**
* process need failover process instance
* @param processInstance processInstance
*/
@Transactional(rollbackFor = Exception.class)
public void processNeedFailoverProcessInstances(ProcessInstance processInstance){
// 1. reset the process instance host to "null" so it can be taken over
processInstance.setHost("null");
processInstanceMapper.updateById(processInstance);
// 2. insert a recover command
Command cmd = new Command();
cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId());
cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId()));
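// yields e.g. {"ProcessInstanceId":123}, assuming CMDPARAM_RECOVER_PROCESS_ID_STRING
// resolves to "ProcessInstanceId"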
cmd.setExecutorId(processInstance.getExecutorId());
cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS);
createCommand(cmd);
}
/**
* query all need failover task instances by host
* @param host host
* @return task instance list
*/
public List<TaskInstance> queryNeedFailoverTaskInstances(String host){
return taskInstanceMapper.queryByHostAndStatus(host,
stateArray);
}
/**
* find data source by id
* @param id id
* @return datasource
*/
public DataSource findDataSourceById(int id){
return dataSourceMapper.selectById(id);
}
/**
* update process instance state by id
* @param processInstanceId processInstanceId
* @param executionStatus executionStatus
* @return update process result
*/
public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
ProcessInstance instance = processInstanceMapper.selectById(processInstanceId);
instance.setState(executionStatus);
return processInstanceMapper.updateById(instance);
}
/**
* find process instance by the task id
* @param taskId taskId
* @return process instance
*/
public ProcessInstance findProcessInstanceByTaskId(int taskId){
TaskInstance taskInstance = taskInstanceMapper.selectById(taskId);
if(taskInstance != null){
return processInstanceMapper.selectById(taskInstance.getProcessInstanceId());
}
return null;
}
/**
* find udf function list by ids
* @param ids udf function ids
* @return udf function list
*/
public List<UdfFunc> queryUdfFunListByids(int[] ids){
return udfFuncMapper.queryUdfByIdStr(ids, null);
}
/**
* find tenant code by resource name
* @param resName resource name
* @param resourceType resource type
* @return tenant code
*/
public String queryTenantCodeByResName(String resName,ResourceType resourceType){
return resourceMapper.queryTenantCodeByResourceName(resName, resourceType.ordinal());
}
/**
* find schedule list by process define id.
* @param ids ids
* @return schedule list
*/
public List<Schedule> selectAllByProcessDefineId(int[] ids){
return scheduleMapper.selectAllByProcessDefineArray(ids);
}
/**
* get dependency cycle by work process define id and scheduler fire time
* @param masterId masterId
* @param processDefinitionId processDefinitionId
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency
* @throws Exception if error throws Exception
*/
public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception {
List<CycleDependency> list = getCycleDependencies(masterId,new int[]{processDefinitionId},scheduledFireTime);
return list.size()>0 ? list.get(0) : null;
}
/**
* get dependency cycle list by work process define id list and scheduler fire time
* @param masterId masterId
* @param ids ids
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency list
* @throws Exception if error throws Exception
*/
public List<CycleDependency> getCycleDependencies(int masterId,int[] ids,Date scheduledFireTime) throws Exception {
List<CycleDependency> cycleDependencyList = new ArrayList<CycleDependency>();
if(ArrayUtils.isEmpty(ids)){
logger.warn("ids[] is empty!is invalid!");
return cycleDependencyList;
}
if(scheduledFireTime == null){
logger.warn("scheduledFireTime is null!is invalid!");
return cycleDependencyList;
}
String strCrontab = "";
CronExpression depCronExpression;
Cron depCron;
List<Date> list;
List<Schedule> schedules = this.selectAllByProcessDefineId(ids);
// for all scheduling information
for(Schedule depSchedule:schedules){
strCrontab = depSchedule.getCrontab();
depCronExpression = CronUtils.parse2CronExpression(strCrontab);
depCron = CronUtils.parse2Cron(strCrontab);
CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron);
if(cycleEnum == null){
logger.error("{} is not valid",strCrontab);
continue;
}
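// look back slightly more than one full cycle (25 hours for an hourly cron,
// 32 days for a daily/weekly cron, 13 months for a monthly cron) so that at
// least one previous fire time falls inside the search window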
Calendar calendar = Calendar.getInstance();
switch (cycleEnum){
/*case MINUTE:
calendar.add(Calendar.MINUTE,-61);*/
case HOUR:
calendar.add(Calendar.HOUR,-25);
break;
case DAY:
calendar.add(Calendar.DATE,-32);
break;
case WEEK:
calendar.add(Calendar.DATE,-32);
break;
case MONTH:
calendar.add(Calendar.MONTH,-13);
break;
default:
logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name());
continue;
}
Date start = calendar.getTime();
if(depSchedule.getProcessDefinitionId() == masterId){
list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression);
}else {
list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression);
}
if(list.size()>=1){
start = list.get(list.size()-1);
CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(),start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum);
cycleDependencyList.add(dependency);
}
}
return cycleDependencyList;
}
/**
* find last scheduler process instance in the date interval
* @param definitionId definitionId
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastSchedulerProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last manual process instance interval
* @param definitionId process definition id
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastManualProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last running process instance
* @param definitionId process definition id
* @param startTime start time
* @param endTime end time
* @return process instance
*/
public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) {
return processInstanceMapper.queryLastRunningProcess(definitionId,
startTime,
endTime,
stateArray);
}
/**
* query user queue by process instance id
* @param processInstanceId processInstanceId
* @return queue
*/
public String queryUserQueueByProcessInstanceId(int processInstanceId){
String queue = "";
ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
if(processInstance == null){
return queue;
}
User executor = userMapper.selectById(processInstance.getExecutorId());
if(executor != null){
queue = executor.getQueue();
}
return queue;
}
/**
* get task worker group
* @param taskInstance taskInstance
* @return workerGroup
*/
public String getTaskWorkerGroup(TaskInstance taskInstance) {
String workerGroup = taskInstance.getWorkerGroup();
if(StringUtils.isNotBlank(workerGroup)){
return workerGroup;
}
int processInstanceId = taskInstance.getProcessInstanceId();
ProcessInstance processInstance = findProcessInstanceById(processInstanceId);
if(processInstance != null){
return processInstance.getWorkerGroup();
}
logger.info("task : {} will use default worker group", taskInstance.getId());
return Constants.DEFAULT_WORKER_GROUP;
}
/**
* get have perm project list
* @param userId userId
* @return project list
*/
public List<Project> getProjectListHavePerm(int userId){
List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId);
List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId);
if(createProjects == null){
createProjects = new ArrayList<>();
}
if(authedProjects != null){
createProjects.addAll(authedProjects);
}
return createProjects;
}
/**
* get have perm project ids
* @param userId userId
* @return project ids
*/
public List<Integer> getProjectIdListHavePerm(int userId){
List<Integer> projectIdList = new ArrayList<>();
for(Project project : getProjectListHavePerm(userId)){
projectIdList.add(project.getId());
}
return projectIdList;
}
/**
* list unauthorized resources / datasources / udf functions by authorization type
* @param userId user id
* @param needChecks the ids or names to check
* @param authorizationType authorization type
* @return unauthorized list
*/
public <T> List<T> listUnauthorized(int userId,T[] needChecks,AuthorizationType authorizationType){
List<T> resultList = new ArrayList<T>();
if (!ArrayUtils.isEmpty(needChecks)) {
Set<T> originResSet = new HashSet<T>(Arrays.asList(needChecks));
switch (authorizationType){
case RESOURCE_FILE_ID:
Set<Integer> authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedResourceFiles);
break;
case RESOURCE_FILE_NAME:
Set<String> authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(t -> t.getFullName()).collect(toSet());
originResSet.removeAll(authorizedResources);
break;
case UDF_FILE:
Set<Integer> authorizedUdfFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedUdfFiles);
break;
case DATASOURCE:
Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId,needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedDatasources);
break;
case UDF:
Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedUdfs);
break;
}
resultList.addAll(originResSet);
}
return resultList;
}
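// illustrative usage (the concrete ids are assumptions): listUnauthorized(userId,
// new Integer[]{1, 2, 3}, AuthorizationType.DATASOURCE) returns the subset of the
// given datasource ids that the user is NOT authorized to access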
/**
* get user by user id
* @param userId user id
* @return User
*/
public User getUserById(int userId){
return userMapper.queryDetailsById(userId);
}
/**
* get resource by resource id
* @param resourceId resource id
* @return Resource
*/
public Resource getResourceById(int resourceId){
return resourceMapper.selectById(resourceId);
}
/**
* list resources by ids
* @param resIds resIds
* @return resource list
*/
public List<Resource> listResourceByIds(Integer[] resIds){
return resourceMapper.listResourceByIds(resIds);
}
/**
* format task app id in task instance
* @param taskInstance task instance
* @return app id string in the form definitionId_processInstanceId_taskInstanceId
*/
public String formatTaskAppId(TaskInstance taskInstance){
ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId());
ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if(definition == null || processInstanceById == null){
return "";
}
return String.format("%s_%s_%s",
definition.getId(),
processInstanceById.getId(),
taskInstance.getId());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,739 | [BUG] SQLTask replace the ${} of the SQL statement to "?" does not work | **Describe the bug**
In SqlTask, the call ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime(), paramsMap) replaces every ${} placeholder with its real value first, so the subsequent setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap) finds no ${} placeholders left to bind and therefore does not work.
**Screenshots**
![image](https://user-images.githubusercontent.com/10829956/82187951-bb156980-991f-11ea-9f69-bf978ae83429.png)
**Which version of Dolphin Scheduler:**
-[dev-1.3.0]
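For illustration, a minimal sketch of the problematic order (method names are those above; the concrete SQL and parameter values are assumptions):

```java
// task sql: "select * from t where id = ${v}", paramsMap maps v -> "1"
sql = ParameterUtils.replaceScheduleTime(sql, scheduleTime, paramsMap);
// sql is now "select * from t where id = 1" -- every ${...} token is already gone
setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap); // finds no ${...}, binds nothing
String formatSql = sql.replaceAll(rgex, "?");        // produces no "?" placeholders
```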
| https://github.com/apache/dolphinscheduler/issues/2739 | https://github.com/apache/dolphinscheduler/pull/2741 | cfd83235a29ec9e19f3c87a27e7d77edf3381c63 | 6d0375e46d07f386125a3c6e091e605a515df03d | "2020-05-18T07:56:57Z" | java | "2020-05-21T08:09:36Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ParameterUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import com.alibaba.fastjson.JSON;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DataType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.PlaceholderUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.TimePlaceholderUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.PreparedStatement;
import java.text.ParseException;
import java.util.*;
/**
* parameter parse utils
*/
public class ParameterUtils {
private static final Logger logger = LoggerFactory.getLogger(ParameterUtils.class);
/**
* convert parameters place holders
*
* @param parameterString parameter
* @param parameterMap parameter map
* @return convert parameters place holders
*/
public static String convertParameterPlaceholders(String parameterString, Map<String, String> parameterMap) {
if (StringUtils.isEmpty(parameterString)) {
return parameterString;
}
//Get current time, schedule execute time
String cronTimeStr = parameterMap.get(Constants.PARAMETER_DATETIME);
Date cronTime = null;
if (StringUtils.isNotEmpty(cronTimeStr)) {
try {
cronTime = DateUtils.parseDate(cronTimeStr, new String[]{Constants.PARAMETER_FORMAT_TIME});
} catch (ParseException e) {
logger.error("parse {} exception", cronTimeStr, e);
}
} else {
cronTime = new Date();
}
// replace variables in ${} form, covering both system variables and custom variables
parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true);
// replace time $[...] form, eg. $[yyyyMMdd]
if (cronTime != null) {
parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true);
}
return parameterString;
}
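// illustrative usage (map contents are assumptions): with parameterMap holding
// dt -> "20200518" and Constants.PARAMETER_DATETIME -> "20200518000000",
// convertParameterPlaceholders("ds=${dt} and ts=$[yyyyMMdd]", parameterMap)
// yields "ds=20200518 and ts=20200518"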
/**
* newer variant of convertParameterPlaceholders that reads the schedule time
* from Constants.PARAMETER_SHECDULE_TIME instead of Constants.PARAMETER_DATETIME
*
* @param parameterString parameter
* @param parameterMap parameter map
* @return parameter string with place holders converted
*/
public static String convertParameterPlaceholders2(String parameterString, Map<String, String> parameterMap) {
if (StringUtils.isEmpty(parameterString)) {
return parameterString;
}
//Get current time, schedule execute time
String cronTimeStr = parameterMap.get(Constants.PARAMETER_SHECDULE_TIME);
Date cronTime = null;
if (StringUtils.isNotEmpty(cronTimeStr)) {
try {
cronTime = DateUtils.parseDate(cronTimeStr, new String[]{Constants.PARAMETER_FORMAT_TIME});
} catch (ParseException e) {
logger.error(String.format("parse %s exception", cronTimeStr), e);
}
} else {
cronTime = new Date();
}
// replace variables in ${} form, covering both system variables and custom variables
parameterString = PlaceholderUtils.replacePlaceholders(parameterString, parameterMap, true);
// replace time $[...] form, eg. $[yyyyMMdd]
if (cronTime != null) {
parameterString = TimePlaceholderUtils.replacePlaceholders(parameterString, cronTime, true);
}
return parameterString;
}
/**
* set in parameter
* @param index index
* @param stmt PreparedStatement
* @param dataType data type
* @param value value
* @throws Exception errors
*/
public static void setInParameter(int index, PreparedStatement stmt, DataType dataType, String value)throws Exception{
if (dataType.equals(DataType.VARCHAR)){
stmt.setString(index,value);
}else if (dataType.equals(DataType.INTEGER)){
stmt.setInt(index, Integer.parseInt(value));
}else if (dataType.equals(DataType.LONG)){
stmt.setLong(index, Long.parseLong(value));
}else if (dataType.equals(DataType.FLOAT)){
stmt.setFloat(index, Float.parseFloat(value));
}else if (dataType.equals(DataType.DOUBLE)){
stmt.setDouble(index, Double.parseDouble(value));
}else if (dataType.equals(DataType.DATE)){
stmt.setDate(index, java.sql.Date.valueOf(value));
}else if (dataType.equals(DataType.TIME)){
stmt.setString(index, value);
}else if (dataType.equals(DataType.TIMESTAMP)){
stmt.setTimestamp(index, java.sql.Timestamp.valueOf(value));
}else if (dataType.equals(DataType.BOOLEAN)){
stmt.setBoolean(index,Boolean.parseBoolean(value));
}
}
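// illustrative usage (values are assumptions): setInParameter(1, stmt,
// DataType.INTEGER, "42") binds 42 to the first "?" placeholder of stmt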
/**
* curing user define parameters
*
* @param globalParamMap global param map
* @param globalParamList global param list
* @param commandType command type
* @param scheduleTime schedule time
* @return curing user define parameters
*/
public static String curingGlobalParams(Map<String,String> globalParamMap, List<Property> globalParamList,
CommandType commandType, Date scheduleTime){
if (globalParamList == null || globalParamList.isEmpty()) {
return null;
}
Map<String, String> globalMap = new HashMap<>();
if (globalParamMap != null){
globalMap.putAll(globalParamMap);
}
Map<String,String> allParamMap = new HashMap<>();
//If it is a complement, a complement time needs to be passed in, according to the task type
Map<String,String> timeParams = BusinessTimeUtils
.getBusinessTime(commandType, scheduleTime);
if (timeParams != null) {
allParamMap.putAll(timeParams);
}
allParamMap.putAll(globalMap);
Set<Map.Entry<String, String>> entries = allParamMap.entrySet();
Map<String,String> resolveMap = new HashMap<>();
for (Map.Entry<String,String> entry : entries){
String val = entry.getValue();
if (val.startsWith("$")){
String str = ParameterUtils.convertParameterPlaceholders(val, allParamMap);
resolveMap.put(entry.getKey(),str);
}
}
globalMap.putAll(resolveMap);
for (Property property : globalParamList){
String val = globalMap.get(property.getProp());
if (val != null){
property.setValue(val);
}
}
return JSON.toJSONString(globalParamList);
}
/**
* handle escapes
* @param inputString input string
* @return string filter escapes
*/
public static String handleEscapes(String inputString){
if(StringUtils.isNotEmpty(inputString)){
return inputString.replace("%", "////%");
}
return inputString;
}
/**
* replace $[yyyyMMdd]-style time place holders in text with the scheduler time
* @param text text containing place holders
* @param scheduleTime schedule time; the current date is used when null
* @param paramsMap params map
* @return text with place holders replaced
*/
public static String replaceScheduleTime(String text, Date scheduleTime, Map<String, Property> paramsMap) {
if (paramsMap != null) {
// if scheduleTime is null, use the current date
if (null == scheduleTime) {
scheduleTime = new Date();
}
String dateTime = org.apache.dolphinscheduler.common.utils.DateUtils.format(scheduleTime, Constants.PARAMETER_FORMAT_TIME);
Property p = new Property();
p.setValue(dateTime);
p.setProp(Constants.PARAMETER_SHECDULE_TIME);
paramsMap.put(Constants.PARAMETER_SHECDULE_TIME, p);
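// note: convertParameterPlaceholders2 below substitutes both ${...} variables and
// $[...] time expressions; see issue #2739 for how this interacts with the
// ${} -> "?" parameter binding in SqlTask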
text = ParameterUtils.convertParameterPlaceholders2(text, convert(paramsMap));
}
return text;
}
/**
* format convert
* @param paramsMap params map
* @return Map of converted
* see org.apache.dolphinscheduler.server.utils.ParamUtils.convert
*/
public static Map<String,String> convert(Map<String,Property> paramsMap){
Map<String,String> map = new HashMap<>();
for (Map.Entry<String, Property> en : paramsMap.entrySet()) {
map.put(en.getKey(), en.getValue().getValue());
}
return map;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,739 | [BUG] SQLTask replace the ${} of the SQL statement to "?" does not work | **Describe the bug**
In SqlTask, the call ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime(), paramsMap) replaces every ${} placeholder with its real value first, so the subsequent setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap) finds no ${} placeholders left to bind and therefore does not work.
**Screenshots**
![image](https://user-images.githubusercontent.com/10829956/82187951-bb156980-991f-11ea-9f69-bf978ae83429.png)
**Which version of Dolphin Scheduler:**
-[dev-1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2739 | https://github.com/apache/dolphinscheduler/pull/2741 | cfd83235a29ec9e19f3c87a27e7d77edf3381c63 | 6d0375e46d07f386125a3c6e091e605a515df03d | "2020-05-18T07:56:57Z" | java | "2020-05-21T08:09:36Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task.sql;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.alert.utils.MailUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlBinds;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.dolphinscheduler.server.utils.UDFUtils;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import java.sql.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.apache.dolphinscheduler.common.Constants.*;
import static org.apache.dolphinscheduler.common.enums.DbType.HIVE;
/**
* sql task
*/
public class SqlTask extends AbstractTask {
/**
* sql parameters
*/
private SqlParameters sqlParameters;
/**
* alert dao
*/
private AlertDao alertDao;
/**
* base datasource
*/
private BaseDataSource baseDataSource;
/**
* taskExecutionContext
*/
private TaskExecutionContext taskExecutionContext;
/**
* default query sql limit
*/
private static final int LIMIT = 10000;
public SqlTask(TaskExecutionContext taskExecutionContext, Logger logger) {
super(taskExecutionContext, logger);
this.taskExecutionContext = taskExecutionContext;
logger.info("sql task params {}", taskExecutionContext.getTaskParams());
this.sqlParameters = JSONObject.parseObject(taskExecutionContext.getTaskParams(), SqlParameters.class);
if (!sqlParameters.checkParameters()) {
throw new RuntimeException("sql task params is not valid");
}
this.alertDao = SpringApplicationContext.getBean(AlertDao.class);
}
@Override
public void handle() throws Exception {
// set the name of the current thread
String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId());
Thread.currentThread().setName(threadLoggerInfoName);
logger.info("Full sql parameters: {}", sqlParameters);
logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {}",
sqlParameters.getType(),
sqlParameters.getDatasource(),
sqlParameters.getSql(),
sqlParameters.getLocalParams(),
sqlParameters.getUdfs(),
sqlParameters.getShowType(),
sqlParameters.getConnParams());
try {
SQLTaskExecutionContext sqlTaskExecutionContext = taskExecutionContext.getSqlTaskExecutionContext();
// load class
DataSourceFactory.loadClass(DbType.valueOf(sqlParameters.getType()));
// get datasource
baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(sqlParameters.getType()),
sqlTaskExecutionContext.getConnectionParams());
// ready to execute SQL and parameter entity Map
SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlParameters.getSql());
List<SqlBinds> preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements())
.orElse(new ArrayList<>())
.stream()
.map(this::getSqlAndSqlParamsMap)
.collect(Collectors.toList());
List<SqlBinds> postStatementSqlBinds = Optional.ofNullable(sqlParameters.getPostStatements())
.orElse(new ArrayList<>())
.stream()
.map(this::getSqlAndSqlParamsMap)
.collect(Collectors.toList());
List<String> createFuncs = UDFUtils.createFuncs(sqlTaskExecutionContext.getUdfFuncList(),
taskExecutionContext.getTenantCode(),
logger);
// execute sql task
executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs);
setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
} catch (Exception e) {
setExitStatusCode(Constants.EXIT_CODE_FAILURE);
logger.error("sql task error", e);
throw e;
}
}
/**
* ready to execute SQL and parameter entity Map
* @param sql sql to bind
* @return sql binds
*/
private SqlBinds getSqlAndSqlParamsMap(String sql) {
Map<Integer,Property> sqlParamsMap = new HashMap<>();
StringBuilder sqlBuilder = new StringBuilder();
// merge user-defined, system and local parameters
Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
taskExecutionContext.getDefinedParams(),
sqlParameters.getLocalParametersMap(),
CommandType.of(taskExecutionContext.getCmdTypeIfComplement()),
taskExecutionContext.getScheduleTime());
// assemble the SQL using the final user-defined variables
if(paramsMap == null){
sqlBuilder.append(sql);
return new SqlBinds(sqlBuilder.toString(), sqlParamsMap);
}
if (StringUtils.isNotEmpty(sqlParameters.getTitle())){
String title = ParameterUtils.convertParameterPlaceholders(sqlParameters.getTitle(),
ParamUtils.convert(paramsMap));
logger.info("SQL title : {}",title);
sqlParameters.setTitle(title);
}
// replace $[...] time place holders in sql with the schedule time for history runs and batch complement jobs
sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime(), paramsMap);
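// note: as reported in issue #2739, the call above already substitutes every
// ${...} placeholder, so the ${} -> "?" binding below may find nothing left to bind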
// special characters need to be escaped, ${} needs to be escaped
String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap);
// replace the ${} of the SQL statement with the Placeholder
String formatSql = sql.replaceAll(rgex, "?");
sqlBuilder.append(formatSql);
// print replaced sql
printReplacedSql(sql, formatSql, rgex, sqlParamsMap);
return new SqlBinds(sqlBuilder.toString(), sqlParamsMap);
}
@Override
public AbstractParameters getParameters() {
return this.sqlParameters;
}
/**
* execute function and sql
* @param mainSqlBinds main sql binds
* @param preStatementsBinds pre statements binds
* @param postStatementsBinds post statements binds
* @param createFuncs create functions
*/
public void executeFuncAndSql(SqlBinds mainSqlBinds,
List<SqlBinds> preStatementsBinds,
List<SqlBinds> postStatementsBinds,
List<String> createFuncs){
Connection connection = null;
PreparedStatement stmt = null;
ResultSet resultSet = null;
try {
// if upload resource is HDFS and kerberos startup
CommonUtils.loadKerberosConf();
// create connection
connection = createConnection();
// create temp function
if (CollectionUtils.isNotEmpty(createFuncs)) {
createTempFunction(connection,createFuncs);
}
// pre sql
preSql(connection,preStatementsBinds);
stmt = prepareStatementAndBind(connection, mainSqlBinds);
// decide whether to executeQuery or executeUpdate based on sqlType
if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) {
// query statements need to be converted to a JSONArray and inserted into Alert to send
resultSet = stmt.executeQuery();
resultProcess(resultSet);
} else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) {
// non query statement
stmt.executeUpdate();
}
postSql(connection,postStatementsBinds);
} catch (Exception e) {
logger.error("execute sql error",e);
throw new RuntimeException("execute sql error", e);
} finally {
close(resultSet,stmt,connection);
}
}
/**
* result process
*
* @param resultSet resultSet
* @throws Exception
*/
private void resultProcess(ResultSet resultSet) throws Exception{
JSONArray resultJSONArray = new JSONArray();
ResultSetMetaData md = resultSet.getMetaData();
int num = md.getColumnCount();
int rowCount = 0;
while (rowCount < LIMIT && resultSet.next()) {
JSONObject mapOfColValues = new JSONObject(true);
for (int i = 1; i <= num; i++) {
mapOfColValues.put(md.getColumnName(i), resultSet.getObject(i));
}
resultJSONArray.add(mapOfColValues);
rowCount++;
}
logger.debug("execute sql : {}", JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
// if there is a result set
if (!resultJSONArray.isEmpty() ) {
if (StringUtils.isNotEmpty(sqlParameters.getTitle())) {
sendAttachment(sqlParameters.getTitle(),
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
}else{
sendAttachment(taskExecutionContext.getTaskName() + " query resultsets ",
JSONObject.toJSONString(resultJSONArray, SerializerFeature.WriteMapNullValue));
}
}
}
/**
* pre sql
*
* @param connection connection
* @param preStatementsBinds preStatementsBinds
*/
private void preSql(Connection connection,
List<SqlBinds> preStatementsBinds) throws Exception{
for (SqlBinds sqlBind: preStatementsBinds) {
try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)){
int result = pstmt.executeUpdate();
logger.info("pre statement execute result: {}, for sql: {}",result,sqlBind.getSql());
}
}
}
/**
* post sql
*
* @param connection connection
* @param postStatementsBinds postStatementsBinds
* @throws Exception
*/
private void postSql(Connection connection,
List<SqlBinds> postStatementsBinds) throws Exception{
for (SqlBinds sqlBind: postStatementsBinds) {
try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)){
int result = pstmt.executeUpdate();
logger.info("post statement execute result: {},for sql: {}",result,sqlBind.getSql());
}
}
}
/**
* create temp function
*
* @param connection connection
* @param createFuncs createFuncs
* @throws Exception
*/
private void createTempFunction(Connection connection,
List<String> createFuncs) throws Exception{
try (Statement funcStmt = connection.createStatement()) {
for (String createFunc : createFuncs) {
logger.info("hive create function sql: {}", createFunc);
funcStmt.execute(createFunc);
}
}
}
/**
* create connection
*
* @return connection
* @throws Exception
*/
private Connection createConnection() throws Exception{
// if hive, load connection params if they exist
Connection connection = null;
if (HIVE == DbType.valueOf(sqlParameters.getType())) {
Properties paramProp = new Properties();
paramProp.setProperty(USER, baseDataSource.getUser());
paramProp.setProperty(PASSWORD, baseDataSource.getPassword());
Map<String, String> connParamMap = CollectionUtils.stringToMap(sqlParameters.getConnParams(),
SEMICOLON,
HIVE_CONF);
paramProp.putAll(connParamMap);
connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(),
paramProp);
}else{
connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(),
baseDataSource.getUser(),
baseDataSource.getPassword());
}
return connection;
}
/**
* close jdbc resource
*
* @param resultSet resultSet
* @param pstmt pstmt
* @param connection connection
*/
private void close(ResultSet resultSet,
PreparedStatement pstmt,
Connection connection){
if (resultSet != null){
try {
resultSet.close();
} catch (SQLException e) {
logger.error("close result set error", e);
}
}
if (pstmt != null){
try {
pstmt.close();
} catch (SQLException e) {
logger.error("close prepared statement error", e);
}
}
if (connection != null){
try {
connection.close();
} catch (SQLException e) {
logger.error("close connection error", e);
}
}
}
/**
* preparedStatement bind
* @param connection connection
* @param sqlBinds sql binds
* @return prepared statement
* @throws Exception exception
*/
private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception {
// is the timeout set
boolean timeoutFlag = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.FAILED ||
TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.WARNFAILED;
PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql());
if(timeoutFlag){
stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout());
}
Map<Integer, Property> params = sqlBinds.getParamsMap();
if(params != null) {
for (Map.Entry<Integer, Property> entry : params.entrySet()) {
Property prop = entry.getValue();
ParameterUtils.setInParameter(entry.getKey(), stmt, prop.getType(), prop.getValue());
}
}
logger.info("prepare statement replace sql : {} ", stmt);
return stmt;
}
/**
* send mail as an attachment
* @param title title
* @param content content
*/
public void sendAttachment(String title,String content){
List<User> users = alertDao.queryUserByAlertGroupId(taskExecutionContext.getSqlTaskExecutionContext().getWarningGroupId());
// receiving group list
List<String> receiversList = new ArrayList<>();
for(User user:users){
receiversList.add(user.getEmail().trim());
}
// custom receivers
String receivers = sqlParameters.getReceivers();
if (StringUtils.isNotEmpty(receivers)){
String[] splits = receivers.split(COMMA);
for (String receiver : splits){
receiversList.add(receiver.trim());
}
}
// cc list
List<String> receiversCcList = new ArrayList<>();
// custom cc receivers
String receiversCc = sqlParameters.getReceiversCc();
if (StringUtils.isNotEmpty(receiversCc)){
String[] splits = receiversCc.split(COMMA);
for (String receiverCc : splits){
receiversCcList.add(receiverCc.trim());
}
}
String showTypeName = sqlParameters.getShowType().replace(COMMA,"").trim();
if(EnumUtils.isValidEnum(ShowType.class,showTypeName)){
Map<String, Object> mailResult = MailUtils.sendMails(receiversList,
receiversCcList, title, content, ShowType.valueOf(showTypeName).getDescp());
if(!(boolean) mailResult.get(STATUS)){
throw new RuntimeException("send mail failed!");
}
}else{
logger.error("showType: {} is not valid " ,showTypeName);
throw new RuntimeException(String.format("showType: %s is not valid ",showTypeName));
}
}
/**
* regular expressions match the contents between two specified strings
* @param content content
* @param rgex rgex
* @param sqlParamsMap sql params map
* @param paramsPropsMap params props map
*/
public void setSqlParamsMap(String content, String rgex, Map<Integer,Property> sqlParamsMap, Map<String,Property> paramsPropsMap){
Pattern pattern = Pattern.compile(rgex);
Matcher m = pattern.matcher(content);
int index = 1;
while (m.find()) {
String paramName = m.group(1);
Property prop = paramsPropsMap.get(paramName);
sqlParamsMap.put(index,prop);
index ++;
}
}
/**
* print replace sql
* @param content content
* @param formatSql format sql
* @param rgex rgex
* @param sqlParamsMap sql params map
*/
public void printReplacedSql(String content, String formatSql, String rgex, Map<Integer,Property> sqlParamsMap){
//parameter print style
logger.info("after replace sql, preparing : {}", formatSql);
StringBuilder logPrint = new StringBuilder("replaced sql, parameters:");
for(int i = 1; i <= sqlParamsMap.size(); i++){
logPrint.append(sqlParamsMap.get(i).getValue()).append("(").append(sqlParamsMap.get(i).getType()).append(")");
}
logger.info("Sql Params are {}", logPrint);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG] The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import java.util.regex.Pattern;
/**
* Constants
*/
public final class Constants {
private Constants() {
throw new IllegalStateException("Constants class");
}
/**
* quartz config
*/
public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId";
public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon";
public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties";
public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class";
public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount";
public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = "org.quartz.threadPool.makeThreadsDaemons";
public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority";
public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class";
public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix";
public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered";
public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold";
public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval";
public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock";
public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource";
public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class";
/**
* quartz config default value
*/
public static final String QUARTZ_TABLE_PREFIX = "QRTZ_";
public static final String QUARTZ_MISFIRETHRESHOLD = "60000";
public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000";
public static final String QUARTZ_DATASOURCE = "myDs";
public static final String QUARTZ_THREADCOUNT = "25";
public static final String QUARTZ_THREADPRIORITY = "5";
public static final String QUARTZ_INSTANCENAME = "DolphinScheduler";
public static final String QUARTZ_INSTANCEID = "AUTO";
public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true";
/**
* common properties path
*/
public static final String COMMON_PROPERTIES_PATH = "/common.properties";
/**
* fs.defaultFS
*/
public static final String FS_DEFAULTFS = "fs.defaultFS";
/**
* fs s3a endpoint
*/
public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint";
/**
* fs s3a access key
*/
public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key";
/**
* fs s3a secret key
*/
public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key";
/**
* yarn.resourcemanager.ha.rm.ids
*/
public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids";
public static final String YARN_RESOURCEMANAGER_HA_XX = "xx";
/**
* yarn.application.status.address
*/
public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address";
/**
* hdfs configuration
* hdfs.root.user
*/
public static final String HDFS_ROOT_USER = "hdfs.root.user";
/**
* hdfs/s3 configuration
* resource.upload.path
*/
public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path";
/**
* data basedir path
*/
public static final String DATA_BASEDIR_PATH = "data.basedir.path";
/**
* dolphinscheduler.env.path
*/
public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path";
/**
* environment properties default path
*/
public static final String ENV_PATH = "env/dolphinscheduler_env.sh";
/**
* python home
*/
public static final String PYTHON_HOME="PYTHON_HOME";
/**
* resource.view.suffixs
*/
public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs";
public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties";
/**
* development.state
*/
public static final String DEVELOPMENT_STATE = "development.state";
public static final String DEVELOPMENT_STATE_DEFAULT_VALUE = "true";
/**
* string true
*/
public static final String STRING_TRUE = "true";
/**
* string false
*/
public static final String STRING_FALSE = "false";
/**
* resource storage type
*/
public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type";
/**
* MasterServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/nodes/master";
/**
* WorkerServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/nodes/worker";
/**
* all servers directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS = "/dead-servers";
/**
* MasterServer lock directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters";
/**
* MasterServer failover directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters";
/**
* WorkerServer failover directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers";
/**
* MasterServer startup failover running and fault tolerance process
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters";
/**
* comma ,
*/
public static final String COMMA = ",";
/**
* slash /
*/
public static final String SLASH = "/";
/**
* COLON :
*/
public static final String COLON = ":";
/**
* SINGLE_SLASH /
*/
public static final String SINGLE_SLASH = "/";
/**
* DOUBLE_SLASH //
*/
public static final String DOUBLE_SLASH = "//";
/**
* SEMICOLON ;
*/
public static final String SEMICOLON = ";";
/**
* EQUAL SIGN
*/
public static final String EQUAL_SIGN = "=";
public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg";
public static final String WORKER_RESERVED_MEMORY = "worker.reserved.memory";
public static final String MASTER_MAX_CPULOAD_AVG = "master.max.cpuload.avg";
public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory";
/**
* date format of yyyy-MM-dd HH:mm:ss
*/
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
/**
* date format of yyyyMMddHHmmss
*/
public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss";
/**
* http connect time out
*/
public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000;
/**
* http connect request time out
*/
public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000;
/**
* httpclient socket timeout
*/
public static final int SOCKET_TIMEOUT = 60 * 1000;
/**
* http header
*/
public static final String HTTP_HEADER_UNKNOWN = "unKnown";
/**
* http X-Forwarded-For
*/
public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For";
/**
* http X-Real-IP
*/
public static final String HTTP_X_REAL_IP = "X-Real-IP";
/**
* UTF-8
*/
public static final String UTF_8 = "UTF-8";
/**
* user name regex
*/
public static final Pattern REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,20}$");
/**
* email regex
*/
public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[_|\\-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$");
/**
* read permission
*/
public static final int READ_PERMISSION = 2 * 1;
/**
* write permission
*/
public static final int WRITE_PERMISSION = 2 * 2;
/**
* execute permission
*/
public static final int EXECUTE_PERMISSION = 1;
/**
* default admin permission
*/
public static final int DEFAULT_ADMIN_PERMISSION = 7;
/**
* all permissions
*/
public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION;
/**
* max task timeout
*/
public static final int MAX_TASK_TIMEOUT = 24 * 3600;
/**
* master cpu load
*/
public static final int DEFAULT_MASTER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;
/**
* master reserved memory
*/
public static final double DEFAULT_MASTER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* worker cpu load
*/
public static final int DEFAULT_WORKER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;
/**
* worker reserved memory
*/
public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* default log cache rows num,output when reach the number
*/
public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16;
/**
* log flush interval, output when the interval is reached
*/
public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000;
/**
* time unit: seconds to minutes
*/
public static final int SEC_2_MINUTES_TIME_UNIT = 60;
/**
* rpc port
*/
public static final int RPC_PORT = 50051;
/**
* forbid running task
*/
public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN";
/**
* datasource configuration path
*/
public static final String DATASOURCE_PROPERTIES = "/datasource.properties";
public static final String TASK_RECORD_URL = "task.record.datasource.url";
public static final String TASK_RECORD_FLAG = "task.record.flag";
public static final String TASK_RECORD_USER = "task.record.datasource.username";
public static final String TASK_RECORD_PWD = "task.record.datasource.password";
public static final String DEFAULT = "Default";
public static final String USER = "user";
public static final String PASSWORD = "password";
public static final String XXXXXX = "******";
public static final String NULL = "NULL";
public static final String THREAD_NAME_MASTER_SERVER = "Master-Server";
public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server";
public static final String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd";
public static final String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd";
/**
* command parameter keys
*/
public static final String CMDPARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId";
public static final String CMDPARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList";
public static final String CMDPARAM_RECOVERY_WAITTING_THREAD = "WaittingThreadInstanceId";
public static final String CMDPARAM_SUB_PROCESS = "processInstanceId";
public static final String CMDPARAM_EMPTY_SUB_PROCESS = "0";
public static final String CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId";
public static final String CMDPARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId";
public static final String CMDPARAM_START_NODE_NAMES = "StartNodeNameList";
/**
* complement data start date
*/
public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate";
/**
* complement data end date
*/
public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate";
/**
* hadoop configuration
*/
public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE";
public static final String HADOOP_RM_STATE_STANDBY = "STANDBY";
public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port";
/**
* data source config
*/
public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name";
public static final String SPRING_DATASOURCE_URL = "spring.datasource.url";
public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username";
public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout";
public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize";
public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle";
public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive";
public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis";
public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery";
public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle";
public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow";
public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn";
public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements";
public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit";
public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive";
public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize";
public static final String DEVELOPMENT = "development";
public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties";
/**
* sleep time
*/
public static final int SLEEP_TIME_MILLIS = 1000;
/**
* heartbeat for zk info length
*/
public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 9;
/**
* hadoop params constant
*/
/**
* jar
*/
public static final String JAR = "jar";
/**
* hadoop
*/
public static final String HADOOP = "hadoop";
/**
* -D parameter
*/
public static final String D = "-D";
/**
* -D mapreduce.job.queuename=queuename
*/
public static final String MR_QUEUE = "mapreduce.job.queuename";
/**
* spark params constant
*/
public static final String MASTER = "--master";
public static final String DEPLOY_MODE = "--deploy-mode";
/**
* --class CLASS_NAME
*/
public static final String MAIN_CLASS = "--class";
/**
* --driver-cores NUM
*/
public static final String DRIVER_CORES = "--driver-cores";
/**
* --driver-memory MEM
*/
public static final String DRIVER_MEMORY = "--driver-memory";
/**
* --num-executors NUM
*/
public static final String NUM_EXECUTORS = "--num-executors";
/**
* --executor-cores NUM
*/
public static final String EXECUTOR_CORES = "--executor-cores";
/**
* --executor-memory MEM
*/
public static final String EXECUTOR_MEMORY = "--executor-memory";
/**
* --queue QUEUE
*/
public static final String SPARK_QUEUE = "--queue";
/**
* flink queue parameter: --qu
*/
public static final String FLINK_QUEUE = "--qu";
/**
* exit code success
*/
public static final int EXIT_CODE_SUCCESS = 0;
/**
* exit code kill
*/
public static final int EXIT_CODE_KILL = 137;
/**
* exit code failure
*/
public static final int EXIT_CODE_FAILURE = -1;
/**
* date format of yyyyMMdd
*/
public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd";
/**
* date format of yyyyMMddHHmmss
*/
public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss";
/**
* system date(yyyyMMddHHmmss)
*/
public static final String PARAMETER_DATETIME = "system.datetime";
/**
* system date(yyyymmdd) today
*/
public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate";
/**
* system date(yyyymmdd) yesterday
*/
public static final String PARAMETER_BUSINESS_DATE = "system.biz.date";
/**
* ACCEPTED
*/
public static final String ACCEPTED = "ACCEPTED";
/**
* SUCCEEDED
*/
public static final String SUCCEEDED = "SUCCEEDED";
/**
* NEW
*/
public static final String NEW = "NEW";
/**
* NEW_SAVING
*/
public static final String NEW_SAVING = "NEW_SAVING";
/**
* SUBMITTED
*/
public static final String SUBMITTED = "SUBMITTED";
/**
* FAILED
*/
public static final String FAILED = "FAILED";
/**
* KILLED
*/
public static final String KILLED = "KILLED";
/**
* RUNNING
*/
public static final String RUNNING = "RUNNING";
/**
* underline "_"
*/
public static final String UNDERLINE = "_";
/**
* quartz job prefix
*/
public static final String QUARTZ_JOB_PRIFIX = "job";
/**
* quartz job group prefix
*/
public static final String QUARTZ_JOB_GROUP_PRIFIX = "jobgroup";
/**
* projectId
*/
public static final String PROJECT_ID = "projectId";
/**
* scheduleId
*/
public static final String SCHEDULE_ID = "scheduleId";
/**
* schedule
*/
public static final String SCHEDULE = "schedule";
/**
* application regex
*/
public static final String APPLICATION_REGEX = "application_\\d+_\\d+";
public static final String PID = OSUtils.isWindows() ? "handle" : "pid";
/**
* month_begin
*/
public static final String MONTH_BEGIN = "month_begin";
/**
* add_months
*/
public static final String ADD_MONTHS = "add_months";
/**
* month_end
*/
public static final String MONTH_END = "month_end";
/**
* week_begin
*/
public static final String WEEK_BEGIN = "week_begin";
/**
* week_end
*/
public static final String WEEK_END = "week_end";
/**
* timestamp
*/
public static final String TIMESTAMP = "timestamp";
public static final char SUBTRACT_CHAR = '-';
public static final char ADD_CHAR = '+';
public static final char MULTIPLY_CHAR = '*';
public static final char DIVISION_CHAR = '/';
public static final char LEFT_BRACE_CHAR = '(';
public static final char RIGHT_BRACE_CHAR = ')';
public static final String ADD_STRING = "+";
public static final String MULTIPLY_STRING = "*";
public static final String DIVISION_STRING = "/";
public static final String LEFT_BRACE_STRING = "(";
public static final char P = 'P';
public static final char N = 'N';
public static final String SUBTRACT_STRING = "-";
public static final String GLOBAL_PARAMS = "globalParams";
public static final String LOCAL_PARAMS = "localParams";
public static final String PROCESS_INSTANCE_STATE = "processInstanceState";
public static final String TASK_LIST = "taskList";
public static final String RWXR_XR_X = "rwxr-xr-x";
/**
* master/worker server use for zk
*/
public static final String MASTER_PREFIX = "master";
public static final String WORKER_PREFIX = "worker";
public static final String DELETE_ZK_OP = "delete";
public static final String ADD_ZK_OP = "add";
public static final String ALIAS = "alias";
public static final String CONTENT = "content";
public static final String DEPENDENT_SPLIT = ":||";
public static final String DEPENDENT_ALL = "ALL";
/**
* preview schedule execute count
*/
public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5;
/**
* kerberos
*/
public static final String KERBEROS = "kerberos";
/**
* kerberos expire time
*/
public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time";
/**
* java.security.krb5.conf
*/
public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
/**
* java.security.krb5.conf.path
*/
public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state";
/**
* loginUserFromKeytab user
*/
public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username";
/**
* default worker group id
*/
public static final int DEFAULT_WORKER_ID = -1;
/**
* loginUserFromKeytab path
*/
public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path";
/**
* task log info format
*/
public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s";
/**
* hive conf
*/
public static final String HIVE_CONF = "hiveconf:";
// flink command-line options
public static final String FLINK_YARN_CLUSTER = "yarn-cluster";
public static final String FLINK_RUN_MODE = "-m";
public static final String FLINK_YARN_SLOT = "-ys";
public static final String FLINK_APP_NAME = "-ynm";
public static final String FLINK_TASK_MANAGE = "-yn";
public static final String FLINK_JOB_MANAGE_MEM = "-yjm";
public static final String FLINK_TASK_MANAGE_MEM = "-ytm";
public static final String FLINK_DETACH = "-d";
public static final String FLINK_MAIN_CLASS = "-c";
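/**
 * workflow/task instance states that are not yet terminal
 */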
public static final int[] NOT_TERMINATED_STATES = new int[]{
ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal(),
ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
ExecutionStatus.WAITTING_THREAD.ordinal(),
ExecutionStatus.WAITTING_DEPEND.ordinal()
};
/**
* status
*/
public static final String STATUS = "status";
/**
* message
*/
public static final String MSG = "msg";
/**
* data total
*/
public static final String COUNT = "count";
/**
* page size
*/
public static final String PAGE_SIZE = "pageSize";
/**
* current page no
*/
public static final String PAGE_NUMBER = "pageNo";
/**
 * data list
*/
public static final String DATA_LIST = "data";
public static final String TOTAL_LIST = "totalList";
public static final String CURRENT_PAGE = "currentPage";
public static final String TOTAL_PAGE = "totalPage";
public static final String TOTAL = "total";
/**
* session user
*/
public static final String SESSION_USER = "session.user";
public static final String SESSION_ID = "sessionId";
public static final String PASSWORD_DEFAULT = "******";
/**
* driver
*/
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
/**
* database type
*/
public static final String MYSQL = "MYSQL";
public static final String POSTGRESQL = "POSTGRESQL";
public static final String HIVE = "HIVE";
public static final String SPARK = "SPARK";
public static final String CLICKHOUSE = "CLICKHOUSE";
public static final String ORACLE = "ORACLE";
public static final String SQLSERVER = "SQLSERVER";
public static final String DB2 = "DB2";
/**
* jdbc url
*/
public static final String JDBC_MYSQL = "jdbc:mysql://";
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@";
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String ADDRESS = "address";
public static final String DATABASE = "database";
public static final String JDBC_URL = "jdbcUrl";
public static final String PRINCIPAL = "principal";
public static final String OTHER = "other";
/**
* session timeout
*/
public static final int SESSION_TIME_OUT = 7200;
public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024;
public static final String UDF = "UDF";
public static final String CLASS = "class";
public static final String RECEIVERS = "receivers";
public static final String RECEIVERS_CC = "receiversCc";
/**
* dataSource sensitive param
*/
public static final String DATASOURCE_PASSWORD_REGEX = "(?<=(\"password\":\")).*?(?=(\"))";
/**
* default worker group
*/
public static final String DEFAULT_WORKER_GROUP = "default";
public static final Integer TASK_INFO_LENGTH = 5;
/**
 * schedule time
*/
public static final String PARAMETER_SHECDULE_TIME = "schedule.time";
/**
* authorize writable perm
*/
public static final int AUTHORIZE_WRITABLE_PERM = 7;
/**
* authorize readable perm
*/
public static final int AUTHORIZE_READABLE_PERM = 4;
/**
* plugin configurations
*/
public static final String PLUGIN_JAR_SUFFIX = ".jar";
public static final int NORAML_NODE_STATUS = 0;
public static final int ABNORMAL_NODE_STATUS = 1;
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/ResInfo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.model.Server;
import java.util.Date;
/**
 * heartbeat resource info for the ZK register
*/
public class ResInfo {
/**
* cpuUsage
*/
private double cpuUsage;
/**
* memoryUsage
*/
private double memoryUsage;
/**
* loadAverage
*/
private double loadAverage;
public ResInfo(){}
public ResInfo(double cpuUsage, double memoryUsage){
this.cpuUsage = cpuUsage;
this.memoryUsage = memoryUsage;
}
public ResInfo(double cpuUsage, double memoryUsage, double loadAverage) {
this(cpuUsage,memoryUsage);
this.loadAverage = loadAverage;
}
public double getCpuUsage() {
return cpuUsage;
}
public void setCpuUsage(double cpuUsage) {
this.cpuUsage = cpuUsage;
}
public double getMemoryUsage() {
return memoryUsage;
}
public void setMemoryUsage(double memoryUsage) {
this.memoryUsage = memoryUsage;
}
public double getLoadAverage() {
return loadAverage;
}
public void setLoadAverage(double loadAverage) {
this.loadAverage = loadAverage;
}
/**
 * serialize CPU, memory and load usage to a JSON string
 * @param cpuUsage cpu usage
 * @param memoryUsage memory usage
 * @param loadAverage load average
 * @return resource usage info as a JSON string
*/
public static String getResInfoJson(double cpuUsage , double memoryUsage,double loadAverage){
ResInfo resInfo = new ResInfo(cpuUsage,memoryUsage,loadAverage);
return JSONUtils.toJson(resInfo);
}
/**
* parse heartbeat info for zk
* @param heartBeatInfo heartbeat info
* @return heartbeat info to Server
*/
public static Server parseHeartbeatForZKInfo(String heartBeatInfo){
if (StringUtils.isEmpty(heartBeatInfo)) {
return null;
}
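// heartbeat info is comma separated (written by HeartBeatTask):
// cpuUsage,memoryUsage,loadAverage,availablePhysicalMemorySize,maxCpuloadAvg,reservedMemory,startTime,reportTime,status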
String[] masterArray = heartBeatInfo.split(Constants.COMMA);
if(masterArray.length != Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH){
return null;
}
Server masterServer = new Server();
masterServer.setResInfo(getResInfoJson(Double.parseDouble(masterArray[0]),
Double.parseDouble(masterArray[1]),
Double.parseDouble(masterArray[2])));
masterServer.setCreateTime(DateUtils.stringToDate(masterArray[6]));
masterServer.setLastHeartbeatTime(DateUtils.stringToDate(masterArray[7]));
return masterServer;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.registry;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA;
/**
* master registry
*/
@Service
public class MasterRegistry {
private final Logger logger = LoggerFactory.getLogger(MasterRegistry.class);
/**
* zookeeper registry center
*/
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
/**
* master config
*/
@Autowired
private MasterConfig masterConfig;
/**
* heartbeat executor
*/
private ScheduledExecutorService heartBeatExecutor;
/**
 * master start time
*/
private String startTime;
@PostConstruct
public void init(){
this.startTime = DateUtils.dateToString(new Date());
this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor"));
}
/**
* registry
*/
public void registry() {
String address = OSUtils.getHost();
String localNodePath = getMasterPath();
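// create an ephemeral node with empty data; the heartbeat task periodically overwrites it with resource info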
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() {
@Override
public void stateChanged(CuratorFramework client, ConnectionState newState) {
if(newState == ConnectionState.LOST){
logger.error("master : {} connection lost from zookeeper", address);
} else if(newState == ConnectionState.RECONNECTED){
logger.info("master : {} reconnected to zookeeper", address);
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
} else if(newState == ConnectionState.SUSPENDED){
logger.warn("master : {} connection SUSPENDED ", address);
}
}
});
int masterHeartbeatInterval = masterConfig.getMasterHeartbeatInterval();
HeartBeatTask heartBeatTask = new HeartBeatTask(startTime,
masterConfig.getMasterReservedMemory(),
masterConfig.getMasterMaxCpuloadAvg(),
getMasterPath(),
zookeeperRegistryCenter);
this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, masterHeartbeatInterval, masterHeartbeatInterval, TimeUnit.SECONDS);
logger.info("master node : {} registry to ZK successfully with heartBeatInterval : {}s", address, masterHeartbeatInterval);
}
/**
* remove registry info
*/
public void unRegistry() {
String address = getLocalAddress();
String localNodePath = getMasterPath();
zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath);
logger.info("master node : {} unRegistry to ZK.", address);
}
/**
* get master path
 * @return the master znode path for this server
*/
private String getMasterPath() {
String address = getLocalAddress();
String localNodePath = this.zookeeperRegistryCenter.getMasterPath() + "/" + address;
return localNodePath;
}
/**
* get local address
 * @return host:port of this server
*/
private String getLocalAddress(){
return OSUtils.getHost() + ":" + masterConfig.getListenPort();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/HeartBeatTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.registry;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA;
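/**
 * heartbeat task: periodically writes server resource usage to its registry path
 */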
public class HeartBeatTask extends Thread {
private final Logger logger = LoggerFactory.getLogger(HeartBeatTask.class);
private String startTime;
private double reservedMemory;
private double maxCpuloadAvg;
private String heartBeatPath;
private ZookeeperRegistryCenter zookeeperRegistryCenter;
public HeartBeatTask(String startTime,
double reservedMemory,
double maxCpuloadAvg,
String heartBeatPath,
ZookeeperRegistryCenter zookeeperRegistryCenter){
this.startTime = startTime;
this.reservedMemory = reservedMemory;
this.maxCpuloadAvg = maxCpuloadAvg;
this.heartBeatPath = heartBeatPath;
this.zookeeperRegistryCenter = zookeeperRegistryCenter;
}
@Override
public void run() {
try {
double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize();
double loadAverage = OSUtils.loadAverage();
int status = Constants.NORAML_NODE_STATUS;
if(availablePhysicalMemorySize < reservedMemory
|| loadAverage > maxCpuloadAvg){
logger.warn("load is too high or availablePhysicalMemorySize(G) is too low, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize , loadAverage);
status = Constants.ABNORMAL_NODE_STATUS;
}
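// build the comma separated heartbeat string; field order must match ResInfo.parseHeartbeatForZKInfo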
StringBuilder builder = new StringBuilder(100);
builder.append(OSUtils.cpuUsage()).append(COMMA);
builder.append(OSUtils.memoryUsage()).append(COMMA);
builder.append(OSUtils.loadAverage()).append(COMMA);
builder.append(OSUtils.availablePhysicalMemorySize()).append(COMMA);
builder.append(maxCpuloadAvg).append(COMMA);
builder.append(reservedMemory).append(COMMA);
builder.append(startTime).append(COMMA);
builder.append(DateUtils.dateToString(new Date())).append(COMMA);
builder.append(status);
zookeeperRegistryCenter.getZookeeperCachedOperator().update(heartBeatPath, builder.toString());
} catch (Throwable ex){
logger.error("error write heartbeat info", ex);
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.registry;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static org.apache.dolphinscheduler.common.Constants.COMMA;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import static org.apache.dolphinscheduler.common.Constants.SLASH;
/**
* worker registry
*/
@Service
public class WorkerRegistry {
private final Logger logger = LoggerFactory.getLogger(WorkerRegistry.class);
/**
* zookeeper registry center
*/
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
/**
* worker config
*/
@Autowired
private WorkerConfig workerConfig;
/**
* heartbeat executor
*/
private ScheduledExecutorService heartBeatExecutor;
/**
* worker start time
*/
private String startTime;
private String workerGroup;
@PostConstruct
public void init(){
this.workerGroup = workerConfig.getWorkerGroup();
this.startTime = DateUtils.dateToString(new Date());
this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor"));
}
/**
* registry
*/
public void registry() {
String address = OSUtils.getHost();
String localNodePath = getWorkerPath();
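// create an ephemeral node with empty data; the heartbeat task periodically overwrites it with resource info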
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() {
@Override
public void stateChanged(CuratorFramework client, ConnectionState newState) {
if(newState == ConnectionState.LOST){
logger.error("worker : {} connection lost from zookeeper", address);
} else if(newState == ConnectionState.RECONNECTED){
logger.info("worker : {} reconnected to zookeeper", address);
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
} else if(newState == ConnectionState.SUSPENDED){
logger.warn("worker : {} connection SUSPENDED ", address);
}
}
});
int workerHeartbeatInterval = workerConfig.getWorkerHeartbeatInterval();
HeartBeatTask heartBeatTask = new HeartBeatTask(startTime,
workerConfig.getWorkerReservedMemory(),
workerConfig.getWorkerMaxCpuloadAvg(),
getWorkerPath(),
zookeeperRegistryCenter);
this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, workerHeartbeatInterval, workerHeartbeatInterval, TimeUnit.SECONDS);
logger.info("worker node : {} registry to ZK successfully with heartBeatInterval : {}s", address, workerHeartbeatInterval);
}
/**
* remove registry info
*/
public void unRegistry() {
String address = getLocalAddress();
String localNodePath = getWorkerPath();
zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath);
this.heartBeatExecutor.shutdownNow();
logger.info("worker node : {} unRegistry to ZK.", address);
}
/**
* get worker path
 * @return the worker znode path for this server
*/
private String getWorkerPath() {
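// worker registry path: <workersPath>/<workerGroup>/<host:port>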
String address = getLocalAddress();
StringBuilder builder = new StringBuilder(100);
String workerPath = this.zookeeperRegistryCenter.getWorkerPath();
builder.append(workerPath).append(SLASH);
if(StringUtils.isEmpty(workerGroup)){
workerGroup = DEFAULT_WORKER_GROUP;
}
// trim and lower case are needed
builder.append(workerGroup.trim().toLowerCase()).append(SLASH);
builder.append(address);
return builder.toString();
}
/**
* get local address
 * @return host:port of this server
*/
private String getLocalAddress(){
return OSUtils.getHost() + ":" + workerConfig.getListenPort();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.zk;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ZKNodeType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.ResInfo;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.*;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* abstract zookeeper client
*/
@Component
public abstract class AbstractZKClient extends ZookeeperCachedOperator {
private static final Logger logger = LoggerFactory.getLogger(AbstractZKClient.class);
/**
* remove dead server by host
* @param host host
* @param serverType serverType
 * @throws Exception if the zookeeper operation fails
*/
public void removeDeadServerByHost(String host, String serverType) throws Exception {
List<String> deadServers = super.getChildrenKeys(getDeadZNodeParentPath());
for(String serverPath : deadServers){
if(serverPath.startsWith(serverType+UNDERLINE+host)){
String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath;
super.remove(server);
logger.info("{} server {} deleted from zk dead server path success" , serverType , host);
}
}
}
/**
 * opType(add): if a dead server is found, add it to the zk dead server path
 * opType(delete): remove the dead server path from zk
*
* @param zNode node path
* @param zkNodeType master or worker
* @param opType delete or add
* @throws Exception errors
*/
public void handleDeadServer(String zNode, ZKNodeType zkNodeType, String opType) throws Exception {
String host = getHostByEventDataPath(zNode);
String type = (zkNodeType == ZKNodeType.MASTER) ? MASTER_PREFIX : WORKER_PREFIX;
//check server restart, if restart , dead server path in zk should be delete
if(opType.equals(DELETE_ZK_OP)){
removeDeadServerByHost(host, type);
}else if(opType.equals(ADD_ZK_OP)){
String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + host;
if(!super.isExisted(deadServerPath)){
//add dead server info to zk dead server path : /dead-servers/
super.persist(deadServerPath,(type + UNDERLINE + host));
logger.info("{} server dead , and {} added to zk dead server path success" ,
zkNodeType.toString(), zNode);
}
}
}
/**
* get active master num
* @return active master number
*/
public int getActiveMasterNum(){
List<String> childrenList = new ArrayList<>();
try {
// read master node parent path from conf
if(super.isExisted(getZNodeParentPath(ZKNodeType.MASTER))){
childrenList = super.getChildrenKeys(getZNodeParentPath(ZKNodeType.MASTER));
}
} catch (Exception e) {
logger.error("getActiveMasterNum error",e);
}
return childrenList.size();
}
/**
*
* @return zookeeper quorum
*/
public String getZookeeperQuorum(){
return getZookeeperConfig().getServerList();
}
/**
* get server list.
* @param zkNodeType zookeeper node type
* @return server list
*/
public List<Server> getServersList(ZKNodeType zkNodeType){
Map<String, String> masterMap = getServerMaps(zkNodeType);
String parentPath = getZNodeParentPath(zkNodeType);
List<Server> masterServers = new ArrayList<>();
int i = 0;
for (Map.Entry<String, String> entry : masterMap.entrySet()) {
Server masterServer = ResInfo.parseHeartbeatForZKInfo(entry.getValue());
masterServer.setZkDirectory(parentPath + "/"+ entry.getKey());
masterServer.setId(i);
i++;
masterServers.add(masterServer);
}
return masterServers;
}
/**
* get master server list map.
* @param zkNodeType zookeeper node type
* @return result : {host : resource info}
*/
public Map<String, String> getServerMaps(ZKNodeType zkNodeType){
Map<String, String> masterMap = new HashMap<>();
try {
String path = getZNodeParentPath(zkNodeType);
List<String> serverList = super.getChildrenKeys(path);
if(zkNodeType == ZKNodeType.WORKER){
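// worker znodes are nested one level deeper than master znodes: <workerPath>/<group>/<host:port>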
List<String> workerList = new ArrayList<>();
for(String group : serverList){
List<String> groupServers = super.getChildrenKeys(path + Constants.SLASH + group);
for(String groupServer : groupServers){
workerList.add(group + Constants.SLASH + groupServer);
}
}
serverList = workerList;
}
for(String server : serverList){
masterMap.putIfAbsent(server, super.get(path + Constants.SLASH + server));
}
} catch (Exception e) {
logger.error("get server list failed", e);
}
return masterMap;
}
/**
* check the zookeeper node already exists
* @param host host
* @param zkNodeType zookeeper node type
* @return true if exists
*/
public boolean checkZKNodeExists(String host, ZKNodeType zkNodeType) {
String path = getZNodeParentPath(zkNodeType);
if(StringUtils.isEmpty(path)){
logger.error("check zk node exists error, host:{}, zk node type:{}",
host, zkNodeType.toString());
return false;
}
Map<String, String> serverMaps = getServerMaps(zkNodeType);
for(String hostKey : serverMaps.keySet()){
if(hostKey.startsWith(host)){
return true;
}
}
return false;
}
/**
*
* @return get worker node parent path
*/
protected String getWorkerZNodeParentPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS;
}
/**
*
* @return get master node parent path
*/
protected String getMasterZNodeParentPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_MASTERS;
}
/**
*
* @return get master lock path
*/
public String getMasterLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS;
}
/**
*
* @param zkNodeType zookeeper node type
* @return get zookeeper node parent path
*/
public String getZNodeParentPath(ZKNodeType zkNodeType) {
String path = "";
switch (zkNodeType){
case MASTER:
return getMasterZNodeParentPath();
case WORKER:
return getWorkerZNodeParentPath();
case DEAD_SERVER:
return getDeadZNodeParentPath();
default:
break;
}
return path;
}
/**
*
* @return get dead server node parent path
*/
protected String getDeadZNodeParentPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS;
}
/**
*
* @return get master start up lock path
*/
public String getMasterStartUpLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS;
}
/**
*
* @return get master failover lock path
*/
public String getMasterFailoverLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS;
}
/**
*
* @return get worker failover lock path
*/
public String getWorkerFailoverLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS;
}
/**
* release mutex
* @param mutex mutex
*/
public void releaseMutex(InterProcessMutex mutex) {
if (mutex != null){
try {
mutex.release();
} catch (Exception e) {
if(e.getMessage().equals("instance must be started before calling this method")){
logger.warn("lock release");
}else{
logger.error("lock release failed",e);
}
}
}
}
/**
* init system znode
*/
protected void initSystemZNode(){
try {
persist(getMasterZNodeParentPath(), "");
persist(getWorkerZNodeParentPath(), "");
persist(getDeadZNodeParentPath(), "");
logger.info("initialize server nodes success.");
} catch (Exception e) {
logger.error("init system znode failed",e);
}
}
/**
 * get the host (last path segment) from an event data path, e.g. parentPath/host
 * @param path znode path
 * @return host, i.e. the last segment of the path
*/
protected String getHostByEventDataPath(String path) {
if(StringUtils.isEmpty(path)){
logger.error("empty path!");
return "";
}
String[] pathArray = path.split(SINGLE_SLASH);
if(pathArray.length < 1){
logger.error("parse ip error: {}", path);
return "";
}
return pathArray[pathArray.length - 1];
}
@Override
public String toString() {
return "AbstractZKClient{" +
"zkClient=" + zkClient +
", deadServerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.DEAD_SERVER) + '\'' +
", masterZNodeParentPath='" + getZNodeParentPath(ZKNodeType.MASTER) + '\'' +
", workerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.WORKER) + '\'' +
'}';
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/master.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-list-construction :title="'Master ' + $t('Manage')">
<template slot="content">
<div class="servers-wrapper" v-show="masterList.length">
<div class="row-box" v-for="(item,$index) in masterList" :key="$index">
<div class="row-title">
<div class="left">
<span class="sp">IP: {{item.host}}</span>
<span class="sp">{{$t('Process Pid')}}: {{item.port}}</span>
<span class="sp">{{$t('Zk registration directory')}}: {{item.zkDirectory}}</span>
</div>
<div class="right">
<span class="sp">{{$t('Create Time')}}: {{item.createTime | formatDate}}</span>
<span class="sp">{{$t('Last heartbeat time')}}: {{item.lastHeartbeatTime | formatDate}}</span>
</div>
</div>
<div class="row-cont">
<div class="col-md-4">
<m-gauge
:value="(item.resInfo.cpuUsage * 100).toFixed(2)"
:name="'cpuUsage'"
:id="'gauge-cpu-' + item.id">
</m-gauge>
</div>
<div class="col-md-4">
<m-gauge
:value="(item.resInfo.memoryUsage * 100).toFixed(2)"
:name="'memoryUsage'"
:id="'gauge-memory-' + item.id">
</m-gauge>
</div>
<div class="col-md-4">
<div class="text-num-model">
<div class="value-p">
<strong :style="{color:color[$index]}">{{item.resInfo.loadAverage > 0? item.resInfo.loadAverage.toFixed(2):0}}</strong>
</div>
<div class="text-1">
loadAverage
</div>
</div>
</div>
</div>
</div>
</div>
<div v-if="!masterList.length">
<m-no-data></m-no-data>
</div>
<m-spin :is-spin="isLoading"></m-spin>
</template>
</m-list-construction>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import mGauge from './_source/gauge'
import mList from './_source/zookeeperList'
import mSpin from '@/module/components/spin/spin'
import mNoData from '@/module/components/noData/noData'
import themeData from '@/module/echarts/themeData.json'
import mListConstruction from '@/module/components/listConstruction/listConstruction'
export default {
name: 'servers-master',
data () {
return {
isLoading: false,
masterList: [],
color: themeData.color
}
},
props: {},
methods: {
...mapActions('monitor', ['getMasterData'])
},
watch: {},
created () {
},
mounted () {
this.isLoading = true
this.getMasterData().then(res => {
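// resInfo is stored as a JSON string (see ResInfo.getResInfoJson); parse it for display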
this.masterList = _.map(res, (v, i) => {
return _.assign(v, {
resInfo: JSON.parse(v.resInfo)
})
})
this.isLoading = false
}).catch(() => {
this.isLoading = false
})
},
components: { mList, mListConstruction, mSpin, mNoData, mGauge }
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./servers";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/monitor/pages/servers/worker.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-list-construction :title="'Worker ' + $t('Manage')">
<template slot="content">
<div class="servers-wrapper" v-show="workerList.length">
<div class="row-box" v-for="(item,$index) in workerList" :key="$index">
<div class="row-title">
<div class="left">
<span class="sp">IP: {{item.host}}</span>
<span class="sp">{{$t('Process Pid')}}: {{item.port}}</span>
<span class="sp">{{$t('Zk registration directory')}}: {{item.zkDirectory}}</span>
</div>
<div class="right">
<span class="sp">{{$t('Create Time')}}: {{item.createTime | formatDate}}</span>
<span class="sp">{{$t('Last heartbeat time')}}: {{item.lastHeartbeatTime | formatDate}}</span>
</div>
</div>
<div class="row-cont">
<div class="col-md-4">
<m-gauge
:value="(item.resInfo.cpuUsage * 100).toFixed(2)"
:name="'cpuUsage'"
:id="'gauge-cpu-' + item.id">
</m-gauge>
</div>
<div class="col-md-4">
<m-gauge
:value="(item.resInfo.memoryUsage * 100).toFixed(2)"
:name="'memoryUsage'"
:id="'gauge-memory-' + item.id">
</m-gauge>
</div>
<div class="col-md-4">
<div class="text-num-model">
<div class="value-p">
<strong :style="{color:color[$index]}">{{item.resInfo.loadAverage > 0? item.resInfo.loadAverage.toFixed(2):0}}</strong>
</div>
<div class="text-1">
loadAverage
</div>
</div>
</div>
</div>
</div>
</div>
<div v-if="!workerList.length">
<m-no-data></m-no-data>
</div>
<m-spin :is-spin="isLoading"></m-spin>
</template>
</m-list-construction>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import mGauge from './_source/gauge'
import mList from './_source/zookeeperList'
import mSpin from '@/module/components/spin/spin'
import mNoData from '@/module/components/noData/noData'
import themeData from '@/module/echarts/themeData.json'
import mListConstruction from '@/module/components/listConstruction/listConstruction'
export default {
name: 'servers-worker',
data () {
return {
isLoading: false,
workerList: [],
color: themeData.color
}
},
props: {},
methods: {
...mapActions('monitor', ['getWorkerData'])
},
watch: {},
created () {
},
mounted () {
this.isLoading = true
this.getWorkerData().then(res => {
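// resInfo is stored as a JSON string (see ResInfo.getResInfoJson); parse it for display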
this.workerList = _.map(res, (v, i) => {
return _.assign(v, {
resInfo: JSON.parse(v.resInfo)
})
})
this.isLoading = false
}).catch(() => {
this.isLoading = false
})
},
components: { mList, mListConstruction, mSpin, mNoData, mGauge }
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./servers";
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,735 | [BUG]The IP and process pid of the master and worker in the monitoring center are not displayed |
![image](https://user-images.githubusercontent.com/55787491/82179785-c06bb780-9911-11ea-996a-ec3aad68bf41.png)
![image](https://user-images.githubusercontent.com/55787491/82179769-b5188c00-9911-11ea-8e31-699b85b3a304.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
| https://github.com/apache/dolphinscheduler/issues/2735 | https://github.com/apache/dolphinscheduler/pull/2770 | 3d857bba925348f4b92b9ba857c37fbca28549dd | 396b1716e4e9b0f55f3686ba8e0283555c65c085 | "2020-05-18T06:14:37Z" | java | "2020-05-22T03:20:54Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name(required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Please enter name (required)': '请输入名称(必填)',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '项目首页',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
success: '成功',
failed: '失败',
delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main class': '主函数的class',
'Main jar package': '主jar包',
'Please enter main jar package': '请选择主jar包',
'Command-line parameters': '命令行参数',
'Please enter Command-line parameters': '请输入命令行参数',
'Other parameters': '其他参数',
'Please enter other parameters': '请输入其他参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
'Please enter main class': '请填写主函数的class',
Datasource: '数据源',
methods: '方法',
'Please enter method(optional)': '请输入方法(选填)',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Driver core number': 'Driver内核数',
'Please enter driver core number': '请输入Driver内核数',
'Driver memory use': 'Driver内存数',
'Please enter driver memory use': '请输入Driver内存数',
'Number of Executors': 'Executor数量',
'Please enter the number of Executor': '请输入Executor数量',
'Executor memory': 'Executor内存数',
'Please enter the Executor memory': '请输入Executor内存数',
'Executor core number': 'Executor内核数',
'Please enter Executor core number': '请输入Executor内核数',
'The number of Executors should be a positive integer': 'Executor数量为正整数',
'Memory should be a positive integer': '内存数为数字',
'Please enter ExecutorPlease enter Executor core number': '请填写Executor内核数',
'Core number should be positive integer': '内核数为正整数',
'SQL Type': 'sql类型',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Recipient required': '收件人邮箱必填',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入数据源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'Tenant Code': '租户编码',
'Tenant Name': '租户名称',
Queue: '队列',
'Please enter the tenant code in English': '请输入租户编码只允许英文',
'Please enter tenant code in English': '请输入英文租户编码',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Group Name': '组名称',
'Please enter group name': '请输入组名称',
'Group Type': '组类型',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': 'prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
Date: '日期',
waiting: '等待',
execution: '执行中',
finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
Name: '名称',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
kill: 'kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
Delete: '删除',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
Recipient: '收件人',
Cc: '抄送人',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
'Please enter method': '请输入方法',
none: '无',
name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': '队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
'Please enter the IP address separated by commas': '请输入IP地址多个用英文逗号隔开',
'Note: Multiple IP addresses have been comma separated': '注意:多个IP地址以英文逗号分割',
'Failure time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'Please select a queue': '默认为租户关联队列',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
slot: 'slot数量',
taskManager: 'taskManager数量',
jobManagerMemory: 'jobManager内存数',
taskManagerMemory: 'taskManager内存数',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'IP address cannot be empty': 'IP地址不能为空',
'Please enter the correct IP': '请输入正确的IP',
'Please generate token': '请生成Token',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,499 | [BUG]The task is in progress, the master and worker hang up, start the master and worker again, the process instance and task are not fault-tolerant | 1. The task is in progress.
2. The worker hangs first, then the master hangs.
3. Start the master and worker again; the process instance and task are not fault-tolerant, and their status stays "in progress".
![image](https://user-images.githubusercontent.com/55787491/80054593-3e76b180-8552-11ea-9779-b243169b6e44.png)
![image](https://user-images.githubusercontent.com/55787491/80054556-299a1e00-8552-11ea-998c-9696719264a4.png)
**Which version of Dolphin Scheduler:**
-[dev]
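
For reference, a minimal sketch of what worker failover is expected to do after the restart (method and enum names are taken from `ZKMasterClient` / `ProcessService` in this repository; this describes the expected behavior, not a proposed patch):

```java
// every task instance left behind by the dead worker should be taken out of the
// running state instead of staying "in progress" forever
List<TaskInstance> needFailover = processService.queryNeedFailoverTaskInstances(workerHost);
for (TaskInstance taskInstance : needFailover) {
    taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE);
    processService.saveTaskInstance(taskInstance);
}
```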
| https://github.com/apache/dolphinscheduler/issues/2499 | https://github.com/apache/dolphinscheduler/pull/2833 | f25069bf839cb76c6dd3bcd58887040f8317d7f6 | d67436ffad7afe4985d843bc63cab260667ed7d7 | "2020-04-23T03:05:33Z" | java | "2020-05-28T04:31:12Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import static org.apache.dolphinscheduler.common.Constants.SLEEP_TIME_MILLIS;
/**
* task callback service
*/
@Service
public class TaskCallbackService {
private final Logger logger = LoggerFactory.getLogger(TaskCallbackService.class);
/**
* remote channels
*/
private static final ConcurrentHashMap<Integer, NettyRemoteChannel> REMOTE_CHANNELS = new ConcurrentHashMap<>();
/**
* zookeeper register center
*/
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
/**
* netty remoting client
*/
private final NettyRemotingClient nettyRemotingClient;
public TaskCallbackService(){
final NettyClientConfig clientConfig = new NettyClientConfig();
this.nettyRemotingClient = new NettyRemotingClient(clientConfig);
}
/**
* add callback channel
* @param taskInstanceId taskInstanceId
* @param channel channel
*/
public void addRemoteChannel(int taskInstanceId, NettyRemoteChannel channel){
REMOTE_CHANNELS.put(taskInstanceId, channel);
}
/**
* get callback channel
* @param taskInstanceId taskInstanceId
* @return callback channel
*/
private NettyRemoteChannel getRemoteChannel(int taskInstanceId){
NettyRemoteChannel nettyRemoteChannel = REMOTE_CHANNELS.get(taskInstanceId);
if(nettyRemoteChannel == null){
throw new IllegalArgumentException("nettyRemoteChannel is empty, should call addRemoteChannel first");
}
if(nettyRemoteChannel.isActive()){
return nettyRemoteChannel;
}
Channel newChannel = nettyRemotingClient.getChannel(nettyRemoteChannel.getHost());
if(newChannel != null){
return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId);
}
logger.warn("original master : {} is not reachable, random select master", nettyRemoteChannel.getHost());
Set<String> masterNodes = null;
// block until at least one master registers in zookeeper, then pick any of them
while (Stopper.isRunning()) {
masterNodes = zookeeperRegistryCenter.getMasterNodesDirectly();
if (CollectionUtils.isEmpty(masterNodes)) {
logger.error("no available master node");
ThreadUtils.sleep(SLEEP_TIME_MILLIS);
}else {
break;
}
}
for(String masterNode : masterNodes){
newChannel = nettyRemotingClient.getChannel(Host.of(masterNode));
if(newChannel != null){
return getRemoteChannel(newChannel, nettyRemoteChannel.getOpaque(), taskInstanceId);
}
}
throw new IllegalStateException(String.format("all available master nodes : %s are not reachable", masterNodes));
}
private NettyRemoteChannel getRemoteChannel(Channel newChannel, long opaque, int taskInstanceId){
NettyRemoteChannel remoteChannel = new NettyRemoteChannel(newChannel, opaque);
addRemoteChannel(taskInstanceId, remoteChannel);
return remoteChannel;
}
/**
* remove callback channels
* @param taskInstanceId taskInstanceId
*/
public void remove(int taskInstanceId){
REMOTE_CHANNELS.remove(taskInstanceId);
}
/**
* send ack
* @param taskInstanceId taskInstanceId
* @param command command
*/
public void sendAck(int taskInstanceId, Command command){
NettyRemoteChannel nettyRemoteChannel = getRemoteChannel(taskInstanceId);
nettyRemoteChannel.writeAndFlush(command);
}
/**
* send result
*
* @param taskInstanceId taskInstanceId
* @param command command
*/
public void sendResult(int taskInstanceId, Command command){
NettyRemoteChannel nettyRemoteChannel = getRemoteChannel(taskInstanceId);
nettyRemoteChannel.writeAndFlush(command).addListener(new ChannelFutureListener(){
@Override
public void operationComplete(ChannelFuture future) throws Exception {
if(future.isSuccess()){
remove(taskInstanceId);
return;
}
}
});
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,499 | [BUG]The task is in progress, the master and worker hang up, start the master and worker again, the process instance and task are not fault-tolerant | 1. The task is in progress.
2. The worker hangs first, then the master hangs.
3. Start the master and worker again; the process instance and task are not fault-tolerant, and their status stays "in progress".
![image](https://user-images.githubusercontent.com/55787491/80054593-3e76b180-8552-11ea-9779-b243169b6e44.png)
![image](https://user-images.githubusercontent.com/55787491/80054556-299a1e00-8552-11ea-998c-9696719264a4.png)
**Which version of Dolphin Scheduler:**
-[dev]
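
For context, the startup self-tolerance in `ZKMasterClient.start` (shown below) only fires for the first master that registers, which may be where this restart case falls through (sketch copied from the file below):

```java
// failover runs only when this is the sole active master at startup;
// a null host means "scan everything"
if (getActiveMasterNum() == 1) {
    failoverWorker(null, true);
    failoverMaster(null);
}
```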
| https://github.com/apache/dolphinscheduler/issues/2499 | https://github.com/apache/dolphinscheduler/pull/2833 | f25069bf839cb76c6dd3bcd58887040f8317d7f6 | d67436ffad7afe4985d843bc63cab260667ed7d7 | "2020-04-23T03:05:33Z" | java | "2020-05-28T04:31:12Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/zk/ZKMasterClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.zk;
import org.apache.commons.lang.StringUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ZKNodeType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.zk.AbstractZKClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.List;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* zookeeper master client
*
* single instance
*/
@Component
public class ZKMasterClient extends AbstractZKClient {
/**
* logger
*/
private static final Logger logger = LoggerFactory.getLogger(ZKMasterClient.class);
/**
* process service
*/
@Autowired
private ProcessService processService;
public void start() {
InterProcessMutex mutex = null;
try {
// create distributed lock with the root node path of the lock space as /dolphinscheduler/lock/failover/master
String znodeLock = getMasterStartUpLockPath();
mutex = new InterProcessMutex(getZkClient(), znodeLock);
mutex.acquire();
// init system znode
this.initSystemZNode();
while (!checkZKNodeExists(OSUtils.getHost(), ZKNodeType.MASTER)){
ThreadUtils.sleep(SLEEP_TIME_MILLIS);
}
// self tolerant
if (getActiveMasterNum() == 1) {
failoverWorker(null, true);
failoverMaster(null);
}
}catch (Exception e){
logger.error("master start up exception",e);
}finally {
releaseMutex(mutex);
}
}
@Override
public void close(){
super.close();
}
/**
* handle path events that this class cares about
* @param client zkClient
* @param event path event
* @param path zk path
*/
@Override
protected void dataChanged(CuratorFramework client, TreeCacheEvent event, String path) {
//monitor master
if(path.startsWith(getZNodeParentPath(ZKNodeType.MASTER)+Constants.SINGLE_SLASH)){
handleMasterEvent(event,path);
}else if(path.startsWith(getZNodeParentPath(ZKNodeType.WORKER)+Constants.SINGLE_SLASH)){
//monitor worker
handleWorkerEvent(event,path);
}
}
/**
* remove zookeeper node path
*
* @param path zookeeper node path
* @param zkNodeType zookeeper node type
* @param failover is failover
*/
private void removeZKNodePath(String path, ZKNodeType zkNodeType, boolean failover) {
logger.info("{} node deleted : {}", zkNodeType.toString(), path);
InterProcessMutex mutex = null;
try {
String failoverPath = getFailoverLockPath(zkNodeType);
// create a distributed lock
mutex = new InterProcessMutex(getZkClient(), failoverPath);
mutex.acquire();
String serverHost = getHostByEventDataPath(path);
// handle dead server
handleDeadServer(path, zkNodeType, Constants.ADD_ZK_OP);
//failover server
if(failover){
failoverServerWhenDown(serverHost, zkNodeType);
}
}catch (Exception e){
logger.error("{} server failover failed.", zkNodeType.toString());
logger.error("failover exception ",e);
}
finally {
releaseMutex(mutex);
}
}
/**
* failover server when server down
*
* @param serverHost server host
* @param zkNodeType zookeeper node type
* @throws Exception exception
*/
private void failoverServerWhenDown(String serverHost, ZKNodeType zkNodeType) throws Exception {
if(StringUtils.isEmpty(serverHost) || serverHost.startsWith(OSUtils.getHost())){
return ;
}
switch (zkNodeType){
case MASTER:
failoverMaster(serverHost);
break;
case WORKER:
failoverWorker(serverHost, true);
break;
default:
break;
}
}
/**
* get failover lock path
*
* @param zkNodeType zookeeper node type
* @return fail over lock path
*/
private String getFailoverLockPath(ZKNodeType zkNodeType){
switch (zkNodeType){
case MASTER:
return getMasterFailoverLockPath();
case WORKER:
return getWorkerFailoverLockPath();
default:
return "";
}
}
/**
* monitor master
* @param event event
* @param path path
*/
public void handleMasterEvent(TreeCacheEvent event, String path){
switch (event.getType()) {
case NODE_ADDED:
logger.info("master node added : {}", path);
break;
case NODE_REMOVED:
removeZKNodePath(path, ZKNodeType.MASTER, true);
break;
default:
break;
}
}
/**
* monitor worker
* @param event event
* @param path path
*/
public void handleWorkerEvent(TreeCacheEvent event, String path){
switch (event.getType()) {
case NODE_ADDED:
logger.info("worker node added : {}", path);
break;
case NODE_REMOVED:
logger.info("worker node deleted : {}", path);
removeZKNodePath(path, ZKNodeType.WORKER, true);
break;
default:
break;
}
}
/**
* task needs failover if task start before worker starts
*
* @param taskInstance task instance
* @return true if task instance need fail over
*/
private boolean checkTaskInstanceNeedFailover(TaskInstance taskInstance) throws Exception {
boolean taskNeedFailover = true;
// no host will execute this task instance now, so no need to failover the task
if(taskInstance.getHost() == null){
return false;
}
// if the worker node exists in zookeeper, we must check the task starts after the worker
if(checkZKNodeExists(taskInstance.getHost(), ZKNodeType.WORKER)){
//if task start after worker starts, there is no need to failover the task.
if(checkTaskAfterWorkerStart(taskInstance)){
taskNeedFailover = false;
}
}
return taskNeedFailover;
}
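// Example (hypothetical timeline): if the worker (re)started at 10:00 and the task instance
// started at 09:55, the task predates the live worker process and needs failover; a task
// started at 10:05 belongs to the live worker and is left alone.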
/**
* check task start after the worker server starts.
*
* @param taskInstance task instance
* @return true if task instance start time after worker server start date
*/
private boolean checkTaskAfterWorkerStart(TaskInstance taskInstance) {
if(StringUtils.isEmpty(taskInstance.getHost())){
return false;
}
Date workerServerStartDate = null;
List<Server> workerServers = getServersList(ZKNodeType.WORKER);
for(Server workerServer : workerServers){
if(workerServer.getHost().equals(taskInstance.getHost())){
workerServerStartDate = workerServer.getCreateTime();
break;
}
}
if(workerServerStartDate != null){
return taskInstance.getStartTime().after(workerServerStartDate);
}else{
return false;
}
}
/**
* failover worker tasks
*
* 1. kill yarn job if there are yarn jobs in tasks.
* 2. change task state from running to need failover.
* 3. failover all tasks when workerHost is null
* @param workerHost worker host
* @param needCheckWorkerAlive need check worker alive
* @throws Exception exception
*/
private void failoverWorker(String workerHost, boolean needCheckWorkerAlive) throws Exception {
logger.info("start worker[{}] failover ...", workerHost);
List<TaskInstance> needFailoverTaskInstanceList = processService.queryNeedFailoverTaskInstances(workerHost);
for(TaskInstance taskInstance : needFailoverTaskInstanceList){
if(needCheckWorkerAlive){
if(!checkTaskInstanceNeedFailover(taskInstance)){
continue;
}
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
if(processInstance != null){
taskInstance.setProcessInstance(processInstance);
}
TaskExecutionContext taskExecutionContext = TaskExecutionContextBuilder.get()
.buildTaskInstanceRelatedInfo(taskInstance)
.buildProcessInstanceRelatedInfo(processInstance)
.create();
// only kill yarn job if exists , the local thread has exited
ProcessUtils.killYarnJob(taskExecutionContext);
taskInstance.setState(ExecutionStatus.NEED_FAULT_TOLERANCE);
processService.saveTaskInstance(taskInstance);
}
logger.info("end worker[{}] failover ...", workerHost);
}
/**
* failover master tasks
*
* @param masterHost master host
*/
private void failoverMaster(String masterHost) {
logger.info("start master failover ...");
List<ProcessInstance> needFailoverProcessInstanceList = processService.queryNeedFailoverProcessInstances(masterHost);
// set the host of the failed-over process instance to null and insert a failover command
for(ProcessInstance processInstance : needFailoverProcessInstanceList){
if(Constants.NULL.equals(processInstance.getHost()) ){
continue;
}
processService.processNeedFailoverProcessInstances(processInstance);
}
logger.info("master failover end");
}
public InterProcessMutex blockAcquireMutex() throws Exception {
InterProcessMutex mutex = new InterProcessMutex(getZkClient(), getMasterLockPath());
mutex.acquire();
return mutex;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,499 | [BUG]The task is in progress, the master and worker hang up, start the master and worker again, the process instance and task are not fault-tolerant | 1. The task is in progress.
2. The worker hangs first, then the master hangs.
3. Start the master and worker again; the process instance and task are not fault-tolerant, and their status stays "in progress".
![image](https://user-images.githubusercontent.com/55787491/80054593-3e76b180-8552-11ea-9779-b243169b6e44.png)
![image](https://user-images.githubusercontent.com/55787491/80054556-299a1e00-8552-11ea-998c-9696719264a4.png)
**Which version of Dolphin Scheduler:**
-[dev]
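
The dead-server bookkeeping in `AbstractZKClient` (below) may also be worth checking; a hypothetical call sequence when a worker znode disappears would be:

```java
// path and host are made-up examples
String zNode = "/dolphinscheduler/nodes/worker/default/192.168.0.2:1234";
handleDeadServer(zNode, ZKNodeType.WORKER, Constants.ADD_ZK_OP);    // mark the server dead
// ... worker failover should run here ...
handleDeadServer(zNode, ZKNodeType.WORKER, Constants.DELETE_ZK_OP); // cleared when it restarts
```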
| https://github.com/apache/dolphinscheduler/issues/2499 | https://github.com/apache/dolphinscheduler/pull/2833 | f25069bf839cb76c6dd3bcd58887040f8317d7f6 | d67436ffad7afe4985d843bc63cab260667ed7d7 | "2020-04-23T03:05:33Z" | java | "2020-05-28T04:31:12Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/AbstractZKClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.zk;
import org.apache.curator.framework.recipes.locks.InterProcessMutex;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ZKNodeType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.ResInfo;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import java.util.*;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* abstract zookeeper client
*/
@Component
public abstract class AbstractZKClient extends ZookeeperCachedOperator {
private static final Logger logger = LoggerFactory.getLogger(AbstractZKClient.class);
/**
* remove dead server by host
* @param host host
* @param serverType serverType
* @throws Exception if the zookeeper operation fails
*/
public void removeDeadServerByHost(String host, String serverType) throws Exception {
List<String> deadServers = super.getChildrenKeys(getDeadZNodeParentPath());
for(String serverPath : deadServers){
if(serverPath.startsWith(serverType+UNDERLINE+host)){
String server = getDeadZNodeParentPath() + SINGLE_SLASH + serverPath;
super.remove(server);
logger.info("{} server {} deleted from zk dead server path success" , serverType , host);
}
}
}
/**
* opType(add): if find dead server , then add to zk deadServerPath
* opType(delete): delete path from zk
*
* @param zNode node path
* @param zkNodeType master or worker
* @param opType delete or add
* @throws Exception errors
*/
public void handleDeadServer(String zNode, ZKNodeType zkNodeType, String opType) throws Exception {
String host = getHostByEventDataPath(zNode);
String type = (zkNodeType == ZKNodeType.MASTER) ? MASTER_PREFIX : WORKER_PREFIX;
//check server restart, if restart , dead server path in zk should be delete
if(opType.equals(DELETE_ZK_OP)){
removeDeadServerByHost(host, type);
}else if(opType.equals(ADD_ZK_OP)){
String deadServerPath = getDeadZNodeParentPath() + SINGLE_SLASH + type + UNDERLINE + host;
if(!super.isExisted(deadServerPath)){
//add dead server info to zk dead server path : /dead-servers/
super.persist(deadServerPath,(type + UNDERLINE + host));
logger.info("{} server dead , and {} added to zk dead server path success" ,
zkNodeType.toString(), zNode);
}
}
}
/**
* get active master num
* @return active master number
*/
public int getActiveMasterNum(){
List<String> childrenList = new ArrayList<>();
try {
// read master node parent path from conf
if(super.isExisted(getZNodeParentPath(ZKNodeType.MASTER))){
childrenList = super.getChildrenKeys(getZNodeParentPath(ZKNodeType.MASTER));
}
} catch (Exception e) {
logger.error("getActiveMasterNum error",e);
}
return childrenList.size();
}
/**
*
* @return zookeeper quorum
*/
public String getZookeeperQuorum(){
return getZookeeperConfig().getServerList();
}
/**
* get server list.
* @param zkNodeType zookeeper node type
* @return server list
*/
public List<Server> getServersList(ZKNodeType zkNodeType){
Map<String, String> masterMap = getServerMaps(zkNodeType);
String parentPath = getZNodeParentPath(zkNodeType);
List<Server> masterServers = new ArrayList<>();
for (Map.Entry<String, String> entry : masterMap.entrySet()) {
Server masterServer = ResInfo.parseHeartbeatForZKInfo(entry.getValue());
if(masterServer == null){
continue;
}
String key = entry.getKey();
masterServer.setZkDirectory(parentPath + "/"+ key);
//set host and port
String[] hostAndPort=key.split(COLON);
String[] hosts=hostAndPort[0].split(DIVISION_STRING);
// fetch the last one
masterServer.setHost(hosts[hosts.length-1]);
masterServer.setPort(Integer.parseInt(hostAndPort[1]));
masterServers.add(masterServer);
}
return masterServers;
}
/**
* get master server list map.
* @param zkNodeType zookeeper node type
* @return result : {host : resource info}
*/
public Map<String, String> getServerMaps(ZKNodeType zkNodeType){
Map<String, String> masterMap = new HashMap<>();
try {
String path = getZNodeParentPath(zkNodeType);
List<String> serverList = super.getChildrenKeys(path);
if(zkNodeType == ZKNodeType.WORKER){
List<String> workerList = new ArrayList<>();
for(String group : serverList){
List<String> groupServers = super.getChildrenKeys(path + Constants.SLASH + group);
for(String groupServer : groupServers){
workerList.add(group + Constants.SLASH + groupServer);
}
}
serverList = workerList;
}
for(String server : serverList){
masterMap.putIfAbsent(server, super.get(path + Constants.SLASH + server));
}
} catch (Exception e) {
logger.error("get server list failed", e);
}
return masterMap;
}
/**
* check the zookeeper node already exists
* @param host host
* @param zkNodeType zookeeper node type
* @return true if exists
*/
public boolean checkZKNodeExists(String host, ZKNodeType zkNodeType) {
String path = getZNodeParentPath(zkNodeType);
if(StringUtils.isEmpty(path)){
logger.error("check zk node exists error, host:{}, zk node type:{}",
host, zkNodeType.toString());
return false;
}
Map<String, String> serverMaps = getServerMaps(zkNodeType);
for(String hostKey : serverMaps.keySet()){
if(hostKey.startsWith(host)){
return true;
}
}
return false;
}
/**
*
* @return get worker node parent path
*/
protected String getWorkerZNodeParentPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS;
}
/**
*
* @return get master node parent path
*/
protected String getMasterZNodeParentPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_MASTERS;
}
/**
*
* @return get master lock path
*/
public String getMasterLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS;
}
/**
*
* @param zkNodeType zookeeper node type
* @return get zookeeper node parent path
*/
public String getZNodeParentPath(ZKNodeType zkNodeType) {
String path = "";
switch (zkNodeType){
case MASTER:
return getMasterZNodeParentPath();
case WORKER:
return getWorkerZNodeParentPath();
case DEAD_SERVER:
return getDeadZNodeParentPath();
default:
break;
}
return path;
}
/**
*
* @return get dead server node parent path
*/
protected String getDeadZNodeParentPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS;
}
/**
*
* @return get master start up lock path
*/
public String getMasterStartUpLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS;
}
/**
*
* @return get master failover lock path
*/
public String getMasterFailoverLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS;
}
/**
*
* @return get worker failover lock path
*/
public String getWorkerFailoverLockPath(){
return getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS;
}
/**
* release mutex
* @param mutex mutex
*/
public void releaseMutex(InterProcessMutex mutex) {
if (mutex != null){
try {
mutex.release();
} catch (Exception e) {
if(e.getMessage().equals("instance must be started before calling this method")){
logger.warn("lock release");
}else{
logger.error("lock release failed",e);
}
}
}
}
/**
* init system znode
*/
protected void initSystemZNode(){
try {
persist(getMasterZNodeParentPath(), "");
persist(getWorkerZNodeParentPath(), "");
persist(getDeadZNodeParentPath(), "");
logger.info("initialize server nodes success.");
} catch (Exception e) {
logger.error("init system znode failed",e);
}
}
/**
* get host ip, string format: masterParentPath/ip
* @param path path
* @return host ip, string format: masterParentPath/ip
*/
protected String getHostByEventDataPath(String path) {
if(StringUtils.isEmpty(path)){
logger.error("empty path!");
return "";
}
String[] pathArray = path.split(SINGLE_SLASH);
if(pathArray.length < 1){
logger.error("parse ip error: {}", path);
return "";
}
return pathArray[pathArray.length - 1];
}
@Override
public String toString() {
return "AbstractZKClient{" +
"zkClient=" + zkClient +
", deadServerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.DEAD_SERVER) + '\'' +
", masterZNodeParentPath='" + getZNodeParentPath(ZKNodeType.MASTER) + '\'' +
", workerZNodeParentPath='" + getZNodeParentPath(ZKNodeType.WORKER) + '\'' +
'}';
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,619 | [BUG] /dolphinscheduler/projects/create doesn't return the project ID when created successfully |
**Describe the bug**
In v1.2.0, invoking /dolphinscheduler/projects/create does not return the project ID on success; it just returns
{
"code": 0,
"msg": "success",
"data": null
}
**Expected behavior**
It should return the project ID; otherwise there is no way to build a relationship between DolphinScheduler and an external system.
**Which version of Dolphin Scheduler:**
-[1.2.0]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please fix this, or explain how to get the project ID for a newly created project.
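
A minimal sketch of a server-side fix in `ProjectService.createProject` (assuming the MyBatis-Plus mapper back-fills the generated id into the entity on insert, and reusing the `Constants.DATA_LIST` payload key this service already uses elsewhere):

```java
if (projectMapper.insert(project) > 0) {
    result.put(Constants.DATA_LIST, project); // project.getId() now carries the generated id
    putMsg(result, Status.SUCCESS);
} else {
    putMsg(result, Status.CREATE_PROJECT_ERROR);
}
```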
| https://github.com/apache/dolphinscheduler/issues/2619 | https://github.com/apache/dolphinscheduler/pull/2804 | e89f543f04872b2b7d8d3c5cd94e68cd4ddaab7e | fbb8ff438ad0c0db1bfc78d33bbc73b2c7d40683 | "2020-05-07T08:17:58Z" | java | "2020-05-30T04:45:39Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProjectService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectUserMapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import static org.apache.dolphinscheduler.api.utils.CheckUtils.checkDesc;
/**
* project service
**/
@Service
public class ProjectService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(ProjectService.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create project
*
* @param loginUser login user
* @param name project name
* @param desc description
* @return returns an error if it exists
*/
public Map<String, Object> createProject(User loginUser, String name, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.queryByName(name);
if (project != null) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, name);
return result;
}
project = new Project();
Date now = new Date();
project.setName(name);
project.setDescription(desc);
project.setUserId(loginUser.getId());
project.setUserName(loginUser.getUserName());
project.setCreateTime(now);
project.setUpdateTime(now);
if (projectMapper.insert(project) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.CREATE_PROJECT_ERROR);
}
return result;
}
/**
* query project details by id
*
* @param projectId project id
* @return project detail information
*/
public Map<String, Object> queryById(Integer projectId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.selectById(projectId);
if (project != null) {
result.put(Constants.DATA_LIST, project);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectId);
}
return result;
}
/**
* check project and authorization
*
* @param loginUser login user
* @param project project
* @param projectName project name
* @return true if the login user have permission to see the project
*/
public Map<String, Object> checkProjectAndAuth(User loginUser, Project project, String projectName) {
Map<String, Object> result = new HashMap<>(5);
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
} else if (!checkReadPermission(loginUser, project)) {
// check read permission
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), projectName);
}else {
putMsg(result, Status.SUCCESS);
}
return result;
}
public boolean hasProjectAndPerm(User loginUser, Project project, Map<String, Object> result) {
boolean checkResult = false;
if (project == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, "");
} else if (!checkReadPermission(loginUser, project)) {
putMsg(result, Status.USER_NO_OPERATION_PROJECT_PERM, loginUser.getUserName(), project.getName());
} else {
checkResult = true;
}
return checkResult;
}
/**
* admin can view all projects
*
* @param loginUser login user
* @param searchVal search value
* @param pageSize page size
* @param pageNo page number
* @return project list which the login user have permission to see
*/
public Map<String, Object> queryProjectListPaging(User loginUser, Integer pageSize, Integer pageNo, String searchVal) {
Map<String, Object> result = new HashMap<>();
PageInfo pageInfo = new PageInfo<Project>(pageNo, pageSize);
Page<Project> page = new Page(pageNo, pageSize);
int userId = loginUser.getUserType() == UserType.ADMIN_USER ? 0 : loginUser.getId();
IPage<Project> projectIPage = projectMapper.queryProjectListPaging(page, userId, searchVal);
List<Project> projectList = projectIPage.getRecords();
if(userId != 0){
for (Project project : projectList) {
project.setPerm(org.apache.dolphinscheduler.common.Constants.DEFAULT_ADMIN_PERMISSION);
}
}
pageInfo.setTotalCount((int)projectIPage.getTotal());
pageInfo.setLists(projectList);
result.put(Constants.COUNT, (int)projectIPage.getTotal());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete project by id
*
* @param loginUser login user
* @param projectId project id
* @return delete result code
*/
public Map<String, Object> deleteProject(User loginUser, Integer projectId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.selectById(projectId);
Map<String, Object> checkResult = getCheckResult(loginUser, project);
if (checkResult != null) {
return checkResult;
}
if (!hasPerm(loginUser, project.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryAllDefinitionList(projectId);
if(processDefinitionList.size() > 0){
putMsg(result, Status.DELETE_PROJECT_ERROR_DEFINES_NOT_NULL);
return result;
}
int delete = projectMapper.deleteById(projectId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROJECT_ERROR);
}
return result;
}
/**
* get check result
*
* @param loginUser login user
* @param project project
* @return check result
*/
private Map<String, Object> getCheckResult(User loginUser, Project project) {
String projectName = project == null ? null:project.getName();
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
/**
* update project
*
* @param loginUser login user
* @param projectId project id
* @param projectName project name
* @param desc description
* @return update result code
*/
public Map<String, Object> update(User loginUser, Integer projectId, String projectName, String desc) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> descCheck = checkDesc(desc);
if (descCheck.get(Constants.STATUS) != Status.SUCCESS) {
return descCheck;
}
Project project = projectMapper.selectById(projectId);
boolean hasProjectAndPerm = hasProjectAndPerm(loginUser, project, result);
if (!hasProjectAndPerm) {
return result;
}
Project tempProject = projectMapper.queryByName(projectName);
if (tempProject != null && tempProject.getId() != projectId) {
putMsg(result, Status.PROJECT_ALREADY_EXISTS, projectName);
return result;
}
project.setName(projectName);
project.setDescription(desc);
project.setUpdateTime(new Date());
int update = projectMapper.updateById(project);
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROJECT_ERROR);
}
return result;
}
/**
* query unauthorized project
*
* @param loginUser login user
* @param userId user id
* @return the projects which user have not permission to see
*/
public Map<String, Object> queryUnauthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
if (checkAdmin(loginUser, result)) {
return result;
}
/**
* query all project list except specified userId
*/
List<Project> projectList = projectMapper.queryProjectExceptUserId(userId);
List<Project> resultList = new ArrayList<>();
Set<Project> projectSet = null;
if (projectList != null && projectList.size() > 0) {
projectSet = new HashSet<>(projectList);
List<Project> authedProjectList = projectMapper.queryAuthedProjectListByUserId(userId);
resultList = getUnauthorizedProjects(projectSet, authedProjectList);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* get unauthorized project
*
* @param projectSet project set
* @param authedProjectList authed project list
* @return project list that authorization
*/
private List<Project> getUnauthorizedProjects(Set<Project> projectSet, List<Project> authedProjectList) {
List<Project> resultList;
Set<Project> authedProjectSet = null;
if (authedProjectList != null && authedProjectList.size() > 0) {
authedProjectSet = new HashSet<>(authedProjectList);
projectSet.removeAll(authedProjectSet);
}
resultList = new ArrayList<>(projectSet);
return resultList;
}
/**
* query authorized project
*
* @param loginUser login user
* @param userId user id
* @return projects which the user have permission to see, Except for items created by this user
*/
public Map<String, Object> queryAuthorizedProject(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (checkAdmin(loginUser, result)) {
return result;
}
List<Project> projects = projectMapper.queryAuthedProjectListByUserId(userId);
result.put(Constants.DATA_LIST, projects);
putMsg(result,Status.SUCCESS);
return result;
}
/**
* check whether have read permission
*
* @param user user
* @param project project
* @return true if the user have permission to see the project, otherwise return false
*/
private boolean checkReadPermission(User user, Project project) {
int permissionId = queryPermission(user, project);
return (permissionId & Constants.READ_PERMISSION) != 0;
}
/**
* query permission id
*
* @param user user
* @param project project
* @return permission
*/
private int queryPermission(User user, Project project) {
if (user.getUserType() == UserType.ADMIN_USER) {
return Constants.READ_PERMISSION;
}
if (project.getUserId() == user.getId()) {
return Constants.ALL_PERMISSIONS;
}
ProjectUser projectUser = projectUserMapper.queryProjectRelation(project.getId(), user.getId());
if (projectUser == null) {
return 0;
}
return projectUser.getPerm();
}
/**
* query all project list that have one or more process definitions.
* @return project list
*/
public Map<String, Object> queryAllProjectList() {
Map<String, Object> result = new HashMap<>();
List<Project> projects = projectMapper.selectList(null);
List<ProcessDefinition> processDefinitions = processDefinitionMapper.selectList(null);
if(projects != null){
Set<Integer> set = new HashSet<>();
for (ProcessDefinition processDefinition : processDefinitions){
set.add(processDefinition.getProjectId());
}
List<Project> tempDeleteList = new ArrayList<>();
for (Project project : projects) {
if(!set.contains(project.getId())){
tempDeleteList.add(project);
}
}
projects.removeAll(tempDeleteList);
}
result.put(Constants.DATA_LIST, projects);
putMsg(result,Status.SUCCESS);
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,619 | [BUG] /dolphinscheduler/projects/create doesn't return the project ID when created successfully |
**Describe the bug**
In v1.2.0, invoking /dolphinscheduler/projects/create does not return the project ID on success; it just returns
{
"code": 0,
"msg": "success",
"data": null
}
**Expected behavior**
It should return the project ID; otherwise there is no way to build a relationship between DolphinScheduler and an external system.
**Which version of Dolphin Scheduler:**
-[1.2.0]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please fix this, or explain how to get the project ID for a newly created project.
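
This would be easy to pin down with one extra assertion in the existing controller test (a hypothetical addition, assuming the `Result` wrapper exposes its `data` payload):

```java
// in testCreateProject, after the status check:
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
Assert.assertNotNull("create should return the new project with its id", result.getData());
```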
| https://github.com/apache/dolphinscheduler/issues/2619 | https://github.com/apache/dolphinscheduler/pull/2804 | e89f543f04872b2b7d8d3c5cd94e68cd4ddaab7e | fbb8ff438ad0c0db1bfc78d33bbc73b2c7d40683 | "2020-05-07T08:17:58Z" | java | "2020-05-30T04:45:39Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProjectControllerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* project controller
*/
public class ProjectControllerTest extends AbstractControllerTest{
private static Logger logger = LoggerFactory.getLogger(ProjectControllerTest.class);
@Test
public void testCreateProject() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("projectName","project_test1");
paramsMap.add("desc","the test project");
MvcResult mvcResult = mockMvc.perform(post("/projects/create")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isCreated())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testUpdateProject() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("projectId","18");
paramsMap.add("projectName","project_test_update");
paramsMap.add("desc","the test project update");
MvcResult mvcResult = mockMvc.perform(post("/projects/update")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryProjectById() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("projectId","18");
MvcResult mvcResult = mockMvc.perform(get("/projects/query-by-id")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryProjectListPaging() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("searchVal","test");
paramsMap.add("pageSize","2");
paramsMap.add("pageNo","2");
MvcResult mvcResult = mockMvc.perform(get("/projects/list-paging")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryUnauthorizedProject() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("userId","2");
MvcResult mvcResult = mockMvc.perform(get("/projects/unauth-project")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryAuthorizedProject() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("userId","2");
MvcResult mvcResult = mockMvc.perform(get("/projects/authed-project")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testQueryAllProjectList() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
MvcResult mvcResult = mockMvc.perform(get("/projects/query-project-list")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Ignore
@Test
public void testImportProcessDefinition() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("file","test");
MvcResult mvcResult = mockMvc.perform(post("/projects/import-definition")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.TEXT_PLAIN))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.IMPORT_PROCESS_DEFINE_ERROR.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
@Test
public void testDeleteProject() throws Exception {
MultiValueMap<String, String> paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("projectId","18");
MvcResult mvcResult = mockMvc.perform(get("/projects/delete")
.header(SESSION_ID, sessionId)
.params(paramsMap))
.andExpect(status().isOk())
.andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8))
.andReturn();
Result result = JSONUtils.parseObject(mvcResult.getResponse().getContentAsString(), Result.class);
Assert.assertEquals(Status.SUCCESS.getCode(),result.getCode().intValue());
logger.info(mvcResult.getResponse().getContentAsString());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,638 | Improve the security of datasource management | **Is your feature request related to a problem? Please describe.**
Our company manages database accounts strictly. We found that when a data source is updated, the stored password is actually returned to the client.
(公司数据库账号管理比较严格,我们发现目前数据源管理更新得时候实际上是把原始密码给带出来了。)
**Describe the solution you'd like**
The datasources/update-ui interface should not return the password. The datasources/updateDataSource interface should leave the password field unchanged when no password is passed; the server can run one extra query during the update to fetch the stored value.
(datasources/update-ui 接口不要将密码带出去,datasources/updateDataSource 接口不传 密码就不更新该字段,服务端更新时可以先多调用一次查询。)
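
A minimal server-side sketch for `updateDataSource` (assuming the connection params are the JSON produced by `DataSourceFactory` and carry a password field; fastjson and the `PASSWORD` constant are already used in `DataSourceService` below):

```java
// if the request omits the password, reuse the one already stored for this datasource
JSONObject newParams = JSON.parseObject(parameter);
String password = newParams.getString(Constants.PASSWORD);
if (password == null || password.isEmpty()) {
    JSONObject oldParams = JSON.parseObject(dataSource.getConnectionParams());
    newParams.put(Constants.PASSWORD, oldParams.getString(Constants.PASSWORD));
    parameter = newParams.toJSONString();
}
```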
| https://github.com/apache/dolphinscheduler/issues/2638 | https://github.com/apache/dolphinscheduler/pull/2844 | fbb8ff438ad0c0db1bfc78d33bbc73b2c7d40683 | 1c153454423f4b967f3fcd8fb906a4626799872f | "2020-05-08T08:42:20Z" | java | "2020-05-30T07:03:10Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.TypeReference;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.datasource.*;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.*;
import static org.apache.dolphinscheduler.common.utils.PropertyUtils.getString;
/**
* datasource service
*/
@Service
public class DataSourceService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class);
public static final String NAME = "name";
public static final String NOTE = "note";
public static final String TYPE = "type";
public static final String HOST = "host";
public static final String PORT = "port";
public static final String PRINCIPAL = "principal";
public static final String DATABASE = "database";
public static final String USER_NAME = "userName";
public static final String PASSWORD = Constants.PASSWORD;
public static final String OTHER = "other";
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
/**
* create data source
*
* @param loginUser login user
* @param name data source name
* @param desc data source description
* @param type data source type
* @param parameter datasource parameters
* @return create result code
*/
public Map<String, Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
Map<String, Object> result = new HashMap<>(5);
// check name can use or not
if (checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
        boolean isConnection = checkConnection(type, parameter);
if (!isConnection) {
logger.info("connect failed, type:{}, parameter:{}", type, parameter);
putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
return result;
}
BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter);
if (datasource == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, parameter);
return result;
}
// build datasource
DataSource dataSource = new DataSource();
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserId(loginUser.getId());
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(parameter);
dataSource.setCreateTime(now);
dataSource.setUpdateTime(now);
dataSourceMapper.insert(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
/**
     * update datasource
*
* @param loginUser login user
* @param name data source name
* @param desc data source description
* @param type data source type
* @param parameter datasource parameters
* @param id data source id
* @return update result code
*/
public Map<String, Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {
Map<String, Object> result = new HashMap<>();
// determine whether the data source exists
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if(!hasPerm(loginUser, dataSource.getUserId())){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
        // check whether the name is available
if(!name.trim().equals(dataSource.getName()) && checkName(name)){
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
        boolean isConnection = checkConnection(type, parameter);
if (!isConnection) {
logger.info("connect failed, type:{}, parameter:{}", type, parameter);
putMsg(result, Status.DATASOURCE_CONNECT_FAILED);
return result;
}
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(parameter);
dataSource.setUpdateTime(now);
dataSourceMapper.updateById(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
    private boolean checkName(String name) {
        List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
        return queryDataSource != null && !queryDataSource.isEmpty();
    }
/**
     * query datasource by id
* @param id datasource id
* @return data source detail
*/
public Map<String, Object> queryDataSource(int id) {
Map<String, Object> result = new HashMap<String, Object>(5);
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// type
String dataSourceType = dataSource.getType().toString();
// name
String dataSourceName = dataSource.getName();
// desc
String desc = dataSource.getNote();
// parameter
String parameter = dataSource.getConnectionParams();
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
String database = datasourceForm.getDatabase();
// jdbc connection params
String other = datasourceForm.getOther();
String address = datasourceForm.getAddress();
String[] hostsPorts = getHostsAndPort(address);
// ip host
String host = hostsPorts[0];
        // port
String port = hostsPorts[1];
String separator = "";
switch (dataSource.getType()) {
case HIVE:
case SQLSERVER:
separator = ";";
break;
case MYSQL:
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
separator = "&";
break;
default:
separator = "&";
break;
}
Map<String, String> otherMap = new LinkedHashMap<String, String>();
if (other != null) {
String[] configs = other.split(separator);
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
}
Map<String, Object> map = new HashMap<>(10);
map.put(NAME, dataSourceName);
map.put(NOTE, desc);
map.put(TYPE, dataSourceType);
map.put(HOST, host);
map.put(PORT, port);
map.put(PRINCIPAL, datasourceForm.getPrincipal());
map.put(DATABASE, database);
map.put(USER_NAME, datasourceForm.getUser());
map.put(PASSWORD, datasourceForm.getPassword());
map.put(OTHER, otherMap);
result.put(Constants.DATA_LIST, map);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query datasource list by keyword
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return data source list page
*/
public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
IPage<DataSource> dataSourceList = null;
        Page<DataSource> dataSourcePage = new Page<>(pageNo, pageSize);
if (isAdmin(loginUser)) {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
}else{
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
}
List<DataSource> dataSources = dataSourceList.getRecords();
handlePasswd(dataSources);
        PageInfo<DataSource> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int)(dataSourceList.getTotal()));
pageInfo.setLists(dataSources);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* handle datasource connection password for safety
     * @param dataSourceList datasource list
*/
private void handlePasswd(List<DataSource> dataSourceList) {
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
JSONObject object = JSON.parseObject(connectionParams);
object.put(Constants.PASSWORD, Constants.XXXXXX);
dataSource.setConnectionParams(JSONUtils.toJson(object));
}
}
/**
* query data resource list
*
* @param loginUser login user
* @param type data source type
* @return data source list page
*/
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>(5);
List<DataSource> datasourceList;
if (isAdmin(loginUser)) {
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
}else{
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
}
result.put(Constants.DATA_LIST, datasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify datasource exists
*
* @param loginUser login user
* @param name datasource name
     * @return true if the datasource name does not exist, otherwise false
*/
public Result verifyDataSourceName(User loginUser, String name) {
Result result = new Result();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && dataSourceList.size() > 0) {
logger.error("datasource name:{} has exist, can't create again.", name);
putMsg(result, Status.DATASOURCE_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* get connection
*
* @param dbType datasource type
* @param parameter parameter
* @return connection for datasource
*/
private Connection getConnection(DbType dbType, String parameter) {
Connection connection = null;
BaseDataSource datasource = null;
try {
switch (dbType) {
case POSTGRESQL:
datasource = JSON.parseObject(parameter, PostgreDataSource.class);
Class.forName(Constants.ORG_POSTGRESQL_DRIVER);
break;
case MYSQL:
datasource = JSON.parseObject(parameter, MySQLDataSource.class);
Class.forName(Constants.COM_MYSQL_JDBC_DRIVER);
break;
case HIVE:
case SPARK:
if (CommonUtils.getKerberosStartupState()) {
System.setProperty(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF,
getString(org.apache.dolphinscheduler.common.Constants.JAVA_SECURITY_KRB5_CONF_PATH));
Configuration configuration = new Configuration();
configuration.set(org.apache.dolphinscheduler.common.Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_USERNAME),
getString(org.apache.dolphinscheduler.common.Constants.LOGIN_USER_KEY_TAB_PATH));
}
if (dbType == DbType.HIVE){
datasource = JSON.parseObject(parameter, HiveDataSource.class);
}else if (dbType == DbType.SPARK){
datasource = JSON.parseObject(parameter, SparkDataSource.class);
}
Class.forName(Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER);
break;
case CLICKHOUSE:
datasource = JSON.parseObject(parameter, ClickHouseDataSource.class);
Class.forName(Constants.COM_CLICKHOUSE_JDBC_DRIVER);
break;
case ORACLE:
datasource = JSON.parseObject(parameter, OracleDataSource.class);
Class.forName(Constants.COM_ORACLE_JDBC_DRIVER);
break;
case SQLSERVER:
datasource = JSON.parseObject(parameter, SQLServerDataSource.class);
Class.forName(Constants.COM_SQLSERVER_JDBC_DRIVER);
break;
case DB2:
datasource = JSON.parseObject(parameter, DB2ServerDataSource.class);
Class.forName(Constants.COM_DB2_JDBC_DRIVER);
break;
default:
break;
}
if(datasource != null){
connection = DriverManager.getConnection(datasource.getJdbcUrl(), datasource.getUser(), datasource.getPassword());
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
}
return connection;
}
/**
* check connection
*
* @param type data source type
* @param parameter data source parameters
* @return true if connect successfully, otherwise false
*/
public boolean checkConnection(DbType type, String parameter) {
        boolean isConnection = false;
Connection con = getConnection(type, parameter);
if (con != null) {
isConnection = true;
try {
con.close();
} catch (SQLException e) {
logger.error("close connection fail at DataSourceService::checkConnection()", e);
}
}
return isConnection;
}
/**
* test connection
*
* @param loginUser login user
* @param id datasource id
* @return connect result code
*/
public boolean connectionTest(User loginUser, int id) {
DataSource dataSource = dataSourceMapper.selectById(id);
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
}
/**
     * build parameters
*
* @param name data source name
* @param desc data source description
* @param type data source type
* @param host data source host
* @param port data source port
* @param database data source database name
* @param userName user name
* @param password password
* @param other other parameters
     * @param principal principal
     * @param connectType connect type
* @return datasource parameter
*/
public String buildParameter(String name, String desc, DbType type, String host,
String port, String database, String principal, String userName,
String password, DbConnectType connectType, String other) {
String address = buildAddress(type, host, port, connectType);
String jdbcUrl;
if (Constants.ORACLE.equals(type.name())
&& connectType == DbConnectType.ORACLE_SID) {
jdbcUrl = address + ":" + database;
} else {
jdbcUrl = address + "/" + database;
}
if (CommonUtils.getKerberosStartupState() &&
(type == DbType.HIVE || type == DbType.SPARK)){
jdbcUrl += ";principal=" + principal;
}
String separator = "";
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
|| Constants.DB2.equals(type.name())
|| Constants.SQLSERVER.equals(type.name())) {
separator = ";";
}
Map<String, Object> parameterMap = new LinkedHashMap<String, Object>(6);
parameterMap.put(Constants.ADDRESS, address);
parameterMap.put(Constants.DATABASE, database);
parameterMap.put(Constants.JDBC_URL, jdbcUrl);
parameterMap.put(Constants.USER, userName);
parameterMap.put(Constants.PASSWORD, password);
if (CommonUtils.getKerberosStartupState() &&
(type == DbType.HIVE || type == DbType.SPARK)){
parameterMap.put(Constants.PRINCIPAL,principal);
}
if (other != null && !"".equals(other)) {
LinkedHashMap<String, String> map = JSON.parseObject(other, new TypeReference<LinkedHashMap<String, String>>() {
});
if (map.size() > 0) {
StringBuilder otherSb = new StringBuilder();
for (Map.Entry<String, String> entry: map.entrySet()) {
otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
}
if (!Constants.DB2.equals(type.name())) {
otherSb.deleteCharAt(otherSb.length() - 1);
}
parameterMap.put(Constants.OTHER, otherSb);
}
}
        if (logger.isDebugEnabled()) {
            logger.debug("parameters map: {}", JSON.toJSONString(parameterMap));
        }
return JSON.toJSONString(parameterMap);
}
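    // Illustrative example of the JSON this method produces (constant values such
    // as Constants.JDBC_MYSQL = "jdbc:mysql://" are assumptions, not verified):
    // for a MySQL source with host 192.168.1.1, port 3306, database "test",
    // user "root" and other = {"useSSL":"false"}, buildParameter would emit roughly:
    //   {"address":"jdbc:mysql://192.168.1.1:3306","database":"test",
    //    "jdbcUrl":"jdbc:mysql://192.168.1.1:3306/test","user":"root",
    //    "password":"xxx","other":"useSSL=false"}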
private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
StringBuilder sb = new StringBuilder();
if (Constants.MYSQL.equals(type.name())) {
sb.append(Constants.JDBC_MYSQL);
sb.append(host).append(":").append(port);
} else if (Constants.POSTGRESQL.equals(type.name())) {
sb.append(Constants.JDBC_POSTGRESQL);
sb.append(host).append(":").append(port);
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
sb.append(Constants.JDBC_HIVE_2);
String[] hostArray = host.split(",");
if (hostArray.length > 0) {
for (String zkHost : hostArray) {
sb.append(String.format("%s:%s,", zkHost, port));
}
sb.deleteCharAt(sb.length() - 1);
}
} else if (Constants.CLICKHOUSE.equals(type.name())) {
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
} else if (Constants.ORACLE.equals(type.name())) {
if (connectType == DbConnectType.ORACLE_SID) {
sb.append(Constants.JDBC_ORACLE_SID);
} else {
sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
}
sb.append(host).append(":").append(port);
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
}else if (Constants.DB2.equals(type.name())) {
sb.append(Constants.JDBC_DB2);
sb.append(host).append(":").append(port);
}
return sb.toString();
}
/**
* delete datasource
*
* @param loginUser login user
* @param datasourceId data source id
* @return delete result code
*/
@Transactional(rollbackFor = Exception.class)
public Result delete(User loginUser, int datasourceId) {
Result result = new Result();
try {
//query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if(dataSource == null){
logger.error("resource id {} not exist", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if(!hasPerm(loginUser, dataSource.getUserId())){
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
dataSourceMapper.deleteById(datasourceId);
datasourceUserMapper.deleteByDatasourceId(datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("delete datasource error",e);
throw new RuntimeException("delete datasource error");
}
return result;
}
/**
* unauthorized datasource
*
* @param loginUser login user
* @param userId user id
* @return unauthed data source result code
*/
public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
//only admin operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
/**
* query all data sources except userId
*/
List<DataSource> resultList = new ArrayList<>();
List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId);
Set<DataSource> datasourceSet = null;
if (datasourceList != null && datasourceList.size() > 0) {
datasourceSet = new HashSet<>(datasourceList);
List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);
Set<DataSource> authedDataSourceSet = null;
if (authedDataSourceList != null && authedDataSourceList.size() > 0) {
authedDataSourceSet = new HashSet<>(authedDataSourceList);
datasourceSet.removeAll(authedDataSourceSet);
}
resultList = new ArrayList<>(datasourceSet);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized datasource
*
* @param loginUser login user
* @param userId user id
* @return authorized result code
*/
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
result.put(Constants.DATA_LIST, authedDatasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get host and port by address
*
     * @param address datasource address
     * @return string array: [host, port]
*/
private String[] getHostsAndPort(String address) {
String[] result = new String[2];
String[] tmpArray = address.split(Constants.DOUBLE_SLASH);
String hostsAndPorts = tmpArray[tmpArray.length - 1];
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);
String port = hostPortArray[0].split(Constants.COLON)[1];
for (String hostPort : hostPortArray) {
hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
}
hosts.deleteCharAt(hosts.length() - 1);
result[0] = hosts.toString();
result[1] = port;
return result;
}
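    // Illustrative example for getHostsAndPort above, assuming the Constants are
    // the literals "//", "," and ":": for the address
    // "jdbc:hive2://host1:10000,host2:10000" it returns ["host1,host2", "10000"];
    // hosts are re-joined with commas and the port comes from the first host:port pair.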
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,860 | [BUG] When a user has a resource directory tree, update the user's tenant to report an error |
![image](https://user-images.githubusercontent.com/55787491/83390197-03ef1700-a424-11ea-8894-674e758cd92d.png)
![image](https://user-images.githubusercontent.com/55787491/83390230-10736f80-a424-11ea-8fd7-c7b9acf27a20.png)
**Which version of Dolphin Scheduler:**
-[dev_1.3.0]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please describe your requirements or improvement suggestions.
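From the stack trace above, the failure appears to sit in the tenant-switch path of `UsersService#updateUser`, which copies each top-level resource by name and mishandles nested directory trees. The linked pull request replaces that flat loop with a recursive traversal; the sketch below only illustrates the idea. `getFullName()` and the `isDirctory()` flag on `ResourceComponent` are assumptions inferred from context, not verified API:

```java
// Hedged sketch of a recursive tenant-to-tenant resource copy inside UsersService;
// getFullName() and isDirctory() on ResourceComponent are assumed accessors.
private void copyResourceFiles(ResourceComponent resourceComponent,
                               String srcBasePath, String dstBasePath) throws IOException {
    List<ResourceComponent> components = resourceComponent.getChildren();
    if (CollectionUtils.isNotEmpty(components)) {
        for (ResourceComponent component : components) {
            String srcPath = String.format("%s/%s", srcBasePath, component.getFullName());
            String dstPath = String.format("%s/%s", dstBasePath, component.getFullName());
            if (!component.isDirctory()) {
                // plain file: copy it straight across tenant directories
                HadoopUtils.getInstance().copy(srcPath, dstPath, false, true);
            } else if (CollectionUtils.isNotEmpty(component.getChildren())) {
                // non-empty directory: recurse so nested resources are copied too
                copyResourceFiles(component, srcBasePath, dstBasePath);
            } else if (!HadoopUtils.getInstance().exists(dstPath)) {
                // empty directory: just create it on the target side
                HadoopUtils.getInstance().mkdir(dstPath);
            }
        }
    }
}
```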
| https://github.com/apache/dolphinscheduler/issues/2860 | https://github.com/apache/dolphinscheduler/pull/2876 | 6723454df9c7d85d3f034d2c13a080db4c130fc1 | 31dca43cdbaec861b2bfc5dc7407f50424216e3c | "2020-06-01T08:22:32Z" | java | "2020-06-02T10:33:06Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
import java.util.stream.Collectors;
/**
* user service
*/
@Service
public class UsersService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(UsersService.class);
@Autowired
private UserMapper userMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ResourceUserMapper resourcesUserMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
@Autowired
private UDFUserMapper udfUserMapper;
@Autowired
private AlertGroupMapper alertGroupMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create user, only system admin have permission
*
* @param loginUser login user
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return create result code
* @throws Exception exception
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> createUser(User loginUser,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
Map<String, Object> result = new HashMap<>(5);
//check all user params
String msg = this.checkUserParams(userName, userPassword, email, phone);
if (!StringUtils.isEmpty(msg)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,msg);
return result;
}
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (!checkTenantExists(tenantId)) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
User user = createUser(userName, userPassword, email, tenantId, phone, queue);
Tenant tenant = tenantMapper.queryById(tenantId);
// resource upload startup
if (PropertyUtils.getResUploadStartupState()){
// if tenant not exists
if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))){
createTenantDirIfNotExists(tenant.getTenantCode());
}
String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(),user.getId());
HadoopUtils.getInstance().mkdir(userPath);
}
putMsg(result, Status.SUCCESS);
return result;
}
@Transactional(rollbackFor = Exception.class)
public User createUser(String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
User user = new User();
Date now = new Date();
user.setUserName(userName);
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
user.setEmail(email);
user.setTenantId(tenantId);
user.setPhone(phone);
// create general users, administrator users are currently built-in
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(now);
user.setUpdateTime(now);
if (StringUtils.isEmpty(queue)){
queue = "";
}
user.setQueue(queue);
// save user
userMapper.insert(user);
return user;
}
/**
* query user by id
* @param id id
* @return user info
*/
public User queryUser(int id) {
return userMapper.selectById(id);
}
/**
* query user
* @param name name
* @return user info
*/
public User queryUser(String name) {
return userMapper.queryByUserNameAccurately(name);
}
/**
* query user
*
* @param name name
* @param password password
* @return user info
*/
public User queryUser(String name, String password) {
String md5 = EncryptionUtils.getMd5(password);
return userMapper.queryUserByNamePassword(name, md5);
}
/**
* get user id by user name
* @param name user name
     * @return 0 if the name is empty, -1 if the user does not exist, otherwise the user id
*/
public int getUserIdByName(String name) {
//executor name query
int executorId = 0;
if (StringUtils.isNotEmpty(name)) {
User executor = queryUser(name);
if (null != executor) {
executorId = executor.getId();
} else {
executorId = -1;
}
}
return executorId;
}
/**
* query user list
*
* @param loginUser login user
* @param pageNo page number
     * @param searchVal search value
* @param pageSize page size
* @return user list page
*/
public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
        Page<User> page = new Page<>(pageNo, pageSize);
IPage<User> scheduleList = userMapper.queryUserPaging(page, searchVal);
PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int)scheduleList.getTotal());
pageInfo.setLists(scheduleList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
     * update user
*
* @param userId user id
* @param userName user name
* @param userPassword user password
* @param email email
     * @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return update result code
* @throws Exception exception
*/
public Map<String, Object> updateUser(int userId,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
User user = userMapper.selectById(userId);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
if (StringUtils.isNotEmpty(userName)) {
if (!CheckUtils.checkUserName(userName)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,userName);
return result;
}
User tempUser = userMapper.queryByUserNameAccurately(userName);
if (tempUser != null && tempUser.getId() != userId) {
putMsg(result, Status.USER_NAME_EXIST);
return result;
}
user.setUserName(userName);
}
if (StringUtils.isNotEmpty(userPassword)) {
if (!CheckUtils.checkPassword(userPassword)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,userPassword);
return result;
}
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
}
if (StringUtils.isNotEmpty(email)) {
if (!CheckUtils.checkEmail(email)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,email);
return result;
}
user.setEmail(email);
}
if (StringUtils.isNotEmpty(phone)) {
if (!CheckUtils.checkPhone(phone)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,phone);
return result;
}
user.setPhone(phone);
}
user.setQueue(queue);
Date now = new Date();
user.setUpdateTime(now);
//if user switches the tenant, the user's resources need to be copied to the new tenant
if (user.getTenantId() != tenantId) {
Tenant oldTenant = tenantMapper.queryById(user.getTenantId());
//query tenant
Tenant newTenant = tenantMapper.queryById(tenantId);
if (newTenant != null) {
// if hdfs startup
if (PropertyUtils.getResUploadStartupState() && oldTenant != null){
String newTenantCode = newTenant.getTenantCode();
String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode());
String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode());
// if old tenant dir exists
if (HadoopUtils.getInstance().exists(oldResourcePath)){
String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode);
String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode);
//file resources list
List<Resource> fileResourcesList = resourceMapper.queryResourceList(
null, userId, ResourceType.FILE.ordinal());
if (CollectionUtils.isNotEmpty(fileResourcesList)) {
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
for (ResourceComponent resource : resourceComponent.getChildren()) {
HadoopUtils.getInstance().copy(oldResourcePath + "/" + resource.getName(), newResourcePath, false, true);
}
}
//udf resources
List<Resource> udfResourceList = resourceMapper.queryResourceList(
null, userId, ResourceType.UDF.ordinal());
if (CollectionUtils.isNotEmpty(udfResourceList)) {
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
for (ResourceComponent resource : resourceComponent.getChildren()) {
HadoopUtils.getInstance().copy(oldUdfsPath + "/" + resource.getName(), newUdfsPath, false, true);
}
}
//Delete the user from the old tenant directory
String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(),userId);
HadoopUtils.getInstance().delete(oldUserPath, true);
}else {
// if old tenant dir not exists , create
createTenantDirIfNotExists(oldTenant.getTenantCode());
}
if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))){
//create user in the new tenant directory
String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(),user.getId());
HadoopUtils.getInstance().mkdir(newUserPath);
}else {
// if new tenant dir not exists , create
createTenantDirIfNotExists(newTenant.getTenantCode());
}
}
}
user.setTenantId(tenantId);
}
        // update user
userMapper.updateById(user);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete user
*
* @param loginUser login user
* @param id user id
* @return delete result code
* @throws Exception exception when operate hdfs
*/
public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM, id);
return result;
}
//check exist
User tempUser = userMapper.selectById(id);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, id);
return result;
}
// delete user
User user = userMapper.queryTenantCodeByUserId(id);
if (user != null) {
if (PropertyUtils.getResUploadStartupState()) {
String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(),id);
if (HadoopUtils.getInstance().exists(userPath)) {
HadoopUtils.getInstance().delete(userPath, true);
}
}
}
userMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant project
*
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
//check exist
User tempUser = userMapper.selectById(userId);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
//if the selected projectIds are empty, delete all items associated with the user
projectUserMapper.deleteProjectRelation(0, userId);
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) {
return result;
}
String[] projectIdArr = projectIds.split(",");
for (String projectId : projectIdArr) {
Date now = new Date();
ProjectUser projectUser = new ProjectUser();
projectUser.setUserId(userId);
projectUser.setProjectId(Integer.parseInt(projectId));
projectUser.setPerm(7);
projectUser.setCreateTime(now);
projectUser.setUpdateTime(now);
projectUserMapper.insert(projectUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant resource
*
* @param loginUser login user
* @param userId user id
* @param resourceIds resource id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if(user == null){
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
        Set<Integer> needAuthorizeResIds = new HashSet<>();
if (StringUtils.isNotBlank(resourceIds)) {
String[] resourceFullIdArr = resourceIds.split(",");
// need authorize resource id set
for (String resourceFullId : resourceFullIdArr) {
String[] resourceIdArr = resourceFullId.split("-");
for (int i=0;i<=resourceIdArr.length-1;i++) {
int resourceIdValue = Integer.parseInt(resourceIdArr[i]);
needAuthorizeResIds.add(resourceIdValue);
}
}
}
//get the authorized resource id list by user id
List<Resource> oldAuthorizedRes = resourceMapper.queryAuthorizedResourceList(userId);
        // if resource type is UDF, need to check whether it is bound by a UDF function
        Set<Integer> oldAuthorizedResIds = oldAuthorizedRes.stream().map(Resource::getId).collect(Collectors.toSet());
//get the unauthorized resource id list
oldAuthorizedResIds.removeAll(needAuthorizeResIds);
if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) {
            // get all resource ids of process definitions that are released
List<Map<String, Object>> list = processDefinitionMapper.listResourcesByUser(userId);
Map<Integer, Set<Integer>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
Set<Integer> resourceIdSet = resourceProcessMap.keySet();
resourceIdSet.retainAll(oldAuthorizedResIds);
if (CollectionUtils.isNotEmpty(resourceIdSet)) {
logger.error("can't be deleted,because it is used of process definition");
for (Integer resId : resourceIdSet) {
logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId));
}
putMsg(result, Status.RESOURCE_IS_USED);
return result;
}
}
resourcesUserMapper.deleteResourceUser(userId, 0);
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) {
return result;
}
for (int resourceIdValue : needAuthorizeResIds) {
Resource resource = resourceMapper.selectById(resourceIdValue);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
Date now = new Date();
ResourcesUser resourcesUser = new ResourcesUser();
resourcesUser.setUserId(userId);
resourcesUser.setResourcesId(resourceIdValue);
if (resource.isDirectory()) {
resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM);
}else{
resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
}
resourcesUser.setCreateTime(now);
resourcesUser.setUpdateTime(now);
resourcesUserMapper.insert(resourcesUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if(user == null){
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
udfUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) {
return result;
}
String[] resourcesIdArr = udfIds.split(",");
for (String udfId : resourcesIdArr) {
Date now = new Date();
UDFUser udfUser = new UDFUser();
udfUser.setUserId(userId);
udfUser.setUdfId(Integer.parseInt(udfId));
udfUser.setPerm(7);
udfUser.setCreateTime(now);
udfUser.setUpdateTime(now);
udfUserMapper.insert(udfUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant datasource
*
* @param loginUser login user
* @param userId user id
* @param datasourceIds data source id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if(user == null){
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
datasourceUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) {
return result;
}
String[] datasourceIdArr = datasourceIds.split(",");
for (String datasourceId : datasourceIdArr) {
Date now = new Date();
DatasourceUser datasourceUser = new DatasourceUser();
datasourceUser.setUserId(userId);
datasourceUser.setDatasourceId(Integer.parseInt(datasourceId));
datasourceUser.setPerm(7);
datasourceUser.setCreateTime(now);
datasourceUser.setUpdateTime(now);
datasourceUserMapper.insert(datasourceUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user info
*
* @param loginUser login user
* @return user info
*/
public Map<String, Object> getUserInfo(User loginUser) {
Map<String, Object> result = new HashMap<>();
User user = null;
if (loginUser.getUserType() == UserType.ADMIN_USER) {
user = loginUser;
} else {
user = userMapper.queryDetailsById(loginUser.getId());
List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId());
StringBuilder sb = new StringBuilder();
if (alertGroups != null && alertGroups.size() > 0) {
                for (int i = 0; i < alertGroups.size() - 1; i++) {
                    sb.append(alertGroups.get(i).getGroupName()).append(",");
                }
                sb.append(alertGroups.get(alertGroups.size() - 1).getGroupName());
user.setAlertGroup(sb.toString());
}
}
result.put(Constants.DATA_LIST, user);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user list
*
* @param loginUser login user
* @return user list
*/
public Map<String, Object> queryAllGeneralUsers(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.queryAllGeneralUser();
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user list
*
* @param loginUser login user
* @return user list
*/
public Map<String, Object> queryUserList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
        List<User> userList = userMapper.selectList(null);
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify user name exists
*
* @param userName user name
* @return true if user name not exists, otherwise return false
*/
public Result verifyUserName(String userName) {
Result result = new Result();
User user = userMapper.queryByUserNameAccurately(userName);
if (user != null) {
logger.error("user {} has exist, can't create again.", userName);
putMsg(result, Status.USER_NAME_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* unauthorized user
*
* @param loginUser login user
* @param alertgroupId alert group id
     * @return unauthorized result code
*/
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
        List<User> userList = userMapper.selectList(null);
List<User> resultUsers = new ArrayList<>();
Set<User> userSet = null;
if (userList != null && userList.size() > 0) {
userSet = new HashSet<>(userList);
List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId);
Set<User> authedUserSet = null;
if (authedUserList != null && authedUserList.size() > 0) {
authedUserSet = new HashSet<>(authedUserList);
userSet.removeAll(authedUserSet);
}
resultUsers = new ArrayList<>(userSet);
}
result.put(Constants.DATA_LIST, resultUsers);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized user
*
* @param loginUser login user
* @param alertgroupId alert group id
* @return authorized result code
*/
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.queryUserListByAlertGroupId(alertgroupId);
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check
*
* @param result result
* @param bool bool
* @param userNoOperationPerm status
* @return check result
*/
private boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
//only admin can operate
if (bool) {
result.put(Constants.STATUS, userNoOperationPerm);
result.put(Constants.MSG, userNoOperationPerm.getMsg());
return true;
}
return false;
}
/**
* @param tenantId tenant id
* @return true if tenant exists, otherwise return false
*/
private boolean checkTenantExists(int tenantId) {
        return tenantMapper.queryById(tenantId) != null;
}
/**
*
     * @param userName user name
     * @param password user password
     * @param email email
     * @param phone phone
     * @return the invalid field if a check fails, otherwise null
*/
private String checkUserParams(String userName, String password, String email, String phone) {
String msg = null;
if (!CheckUtils.checkUserName(userName)) {
msg = userName;
} else if (!CheckUtils.checkPassword(password)) {
msg = password;
} else if (!CheckUtils.checkEmail(email)) {
msg = email;
} else if (!CheckUtils.checkPhone(phone)) {
msg = phone;
}
return msg;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,892 | [BUG] When editing user information, clear the phone number, the database table t_ds_user.phone is not cleared |
![image](https://user-images.githubusercontent.com/55787491/83727354-7f94d200-a677-11ea-9937-70d96123137e.png)
![image](https://user-images.githubusercontent.com/55787491/83727493-ba970580-a677-11ea-8dd4-6e1a40201d05.png)
**Which version of Dolphin Scheduler:**
-[dev-1.3.0]
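The cause is visible in `updateUser` in the file below: `user.setPhone(phone)` only executes inside a `StringUtils.isNotEmpty(phone)` guard, so a cleared field is silently dropped and the old value stays in `t_ds_user.phone`. A minimal sketch of the likely fix, validating only when a value is present but assigning unconditionally (the merged patch may differ in detail):

```java
// Sketch: validate the phone only when one is supplied, but always assign it,
// so clearing the field in the UI also clears t_ds_user.phone on update.
if (StringUtils.isNotEmpty(phone) && !CheckUtils.checkPhone(phone)) {
    putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, phone);
    return result;
}
user.setPhone(phone);
```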
| https://github.com/apache/dolphinscheduler/issues/2892 | https://github.com/apache/dolphinscheduler/pull/2898 | 4441d91dcf178653e55b94fe564c8c9907e897fa | ae9afcf77a930d10d1d61aa23172b831f1a41cdd | "2020-06-04T07:26:35Z" | java | "2020-06-04T09:26:59Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UsersService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.dto.resources.ResourceComponent;
import org.apache.dolphinscheduler.api.dto.resources.visitor.ResourceTreeVisitor;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.utils.ResourceProcessDefinitionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
/**
* user service
*/
@Service
public class UsersService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(UsersService.class);
@Autowired
private UserMapper userMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectUserMapper projectUserMapper;
@Autowired
private ResourceUserMapper resourcesUserMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
@Autowired
private UDFUserMapper udfUserMapper;
@Autowired
private AlertGroupMapper alertGroupMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
/**
* create user, only system admin have permission
*
* @param loginUser login user
* @param userName user name
* @param userPassword user password
* @param email email
* @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return create result code
* @throws Exception exception
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> createUser(User loginUser,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
Map<String, Object> result = new HashMap<>(5);
//check all user params
String msg = this.checkUserParams(userName, userPassword, email, phone);
if (!StringUtils.isEmpty(msg)) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,msg);
return result;
}
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
if (!checkTenantExists(tenantId)) {
putMsg(result, Status.TENANT_NOT_EXIST);
return result;
}
User user = createUser(userName, userPassword, email, tenantId, phone, queue);
Tenant tenant = tenantMapper.queryById(tenantId);
// resource upload startup
if (PropertyUtils.getResUploadStartupState()){
// if tenant not exists
if (!HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(tenant.getTenantCode()))){
createTenantDirIfNotExists(tenant.getTenantCode());
}
String userPath = HadoopUtils.getHdfsUserDir(tenant.getTenantCode(),user.getId());
HadoopUtils.getInstance().mkdir(userPath);
}
putMsg(result, Status.SUCCESS);
return result;
}
@Transactional(rollbackFor = Exception.class)
public User createUser(String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
User user = new User();
Date now = new Date();
user.setUserName(userName);
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
user.setEmail(email);
user.setTenantId(tenantId);
user.setPhone(phone);
// create general users, administrator users are currently built-in
user.setUserType(UserType.GENERAL_USER);
user.setCreateTime(now);
user.setUpdateTime(now);
if (StringUtils.isEmpty(queue)){
queue = "";
}
user.setQueue(queue);
// save user
userMapper.insert(user);
return user;
}
/**
* query user by id
* @param id id
* @return user info
*/
public User queryUser(int id) {
return userMapper.selectById(id);
}
/**
* query user
* @param name name
* @return user info
*/
public User queryUser(String name) {
return userMapper.queryByUserNameAccurately(name);
}
/**
* query user
*
* @param name name
* @param password password
* @return user info
*/
public User queryUser(String name, String password) {
String md5 = EncryptionUtils.getMd5(password);
return userMapper.queryUserByNamePassword(name, md5);
}
/**
* get user id by user name
* @param name user name
     * @return 0 if the name is empty, -1 if the user does not exist, otherwise the user id
*/
public int getUserIdByName(String name) {
//executor name query
int executorId = 0;
if (StringUtils.isNotEmpty(name)) {
User executor = queryUser(name);
if (null != executor) {
executorId = executor.getId();
} else {
executorId = -1;
}
}
return executorId;
}
/**
* query user list
*
* @param loginUser login user
* @param pageNo page number
     * @param searchVal search value
* @param pageSize page size
* @return user list page
*/
public Map<String, Object> queryUserList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>(5);
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
        Page<User> page = new Page<>(pageNo, pageSize);
IPage<User> scheduleList = userMapper.queryUserPaging(page, searchVal);
PageInfo<User> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int)scheduleList.getTotal());
pageInfo.setLists(scheduleList.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
     * update user
*
* @param userId user id
* @param userName user name
* @param userPassword user password
* @param email email
     * @param tenantId tenant id
* @param phone phone
* @param queue queue
* @return update result code
* @throws Exception exception
*/
public Map<String, Object> updateUser(int userId,
String userName,
String userPassword,
String email,
int tenantId,
String phone,
String queue) throws Exception {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
User user = userMapper.selectById(userId);
if (user == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
if (StringUtils.isNotEmpty(userName)) {
if (!CheckUtils.checkUserName(userName)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,userName);
return result;
}
User tempUser = userMapper.queryByUserNameAccurately(userName);
if (tempUser != null && tempUser.getId() != userId) {
putMsg(result, Status.USER_NAME_EXIST);
return result;
}
user.setUserName(userName);
}
if (StringUtils.isNotEmpty(userPassword)) {
if (!CheckUtils.checkPassword(userPassword)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,userPassword);
return result;
}
user.setUserPassword(EncryptionUtils.getMd5(userPassword));
}
if (StringUtils.isNotEmpty(email)) {
if (!CheckUtils.checkEmail(email)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,email);
return result;
}
user.setEmail(email);
}
if (StringUtils.isNotEmpty(phone)) {
if (!CheckUtils.checkPhone(phone)){
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR,phone);
return result;
}
user.setPhone(phone);
}
user.setQueue(queue);
Date now = new Date();
user.setUpdateTime(now);
//if user switches the tenant, the user's resources need to be copied to the new tenant
if (user.getTenantId() != tenantId) {
Tenant oldTenant = tenantMapper.queryById(user.getTenantId());
//query tenant
Tenant newTenant = tenantMapper.queryById(tenantId);
if (newTenant != null) {
// if hdfs startup
if (PropertyUtils.getResUploadStartupState() && oldTenant != null){
String newTenantCode = newTenant.getTenantCode();
String oldResourcePath = HadoopUtils.getHdfsResDir(oldTenant.getTenantCode());
String oldUdfsPath = HadoopUtils.getHdfsUdfDir(oldTenant.getTenantCode());
// if old tenant dir exists
if (HadoopUtils.getInstance().exists(oldResourcePath)){
String newResourcePath = HadoopUtils.getHdfsResDir(newTenantCode);
String newUdfsPath = HadoopUtils.getHdfsUdfDir(newTenantCode);
//file resources list
List<Resource> fileResourcesList = resourceMapper.queryResourceList(
null, userId, ResourceType.FILE.ordinal());
if (CollectionUtils.isNotEmpty(fileResourcesList)) {
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(fileResourcesList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
copyResourceFiles(resourceComponent, oldResourcePath, newResourcePath);
}
//udf resources
List<Resource> udfResourceList = resourceMapper.queryResourceList(
null, userId, ResourceType.UDF.ordinal());
if (CollectionUtils.isNotEmpty(udfResourceList)) {
ResourceTreeVisitor resourceTreeVisitor = new ResourceTreeVisitor(udfResourceList);
ResourceComponent resourceComponent = resourceTreeVisitor.visit();
copyResourceFiles(resourceComponent, oldUdfsPath, newUdfsPath);
}
//Delete the user from the old tenant directory
String oldUserPath = HadoopUtils.getHdfsUserDir(oldTenant.getTenantCode(),userId);
HadoopUtils.getInstance().delete(oldUserPath, true);
}else {
// if old tenant dir not exists , create
createTenantDirIfNotExists(oldTenant.getTenantCode());
}
if (HadoopUtils.getInstance().exists(HadoopUtils.getHdfsTenantDir(newTenant.getTenantCode()))){
//create user in the new tenant directory
String newUserPath = HadoopUtils.getHdfsUserDir(newTenant.getTenantCode(),user.getId());
HadoopUtils.getInstance().mkdir(newUserPath);
}else {
// if new tenant dir not exists , create
createTenantDirIfNotExists(newTenant.getTenantCode());
}
}
}
user.setTenantId(tenantId);
}
        // update user
userMapper.updateById(user);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete user
*
* @param loginUser login user
* @param id user id
* @return delete result code
* @throws Exception exception when operate hdfs
*/
public Map<String, Object> deleteUserById(User loginUser, int id) throws Exception {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM, id);
return result;
}
//check exist
User tempUser = userMapper.selectById(id);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, id);
return result;
}
// delete user
User user = userMapper.queryTenantCodeByUserId(id);
if (user != null) {
if (PropertyUtils.getResUploadStartupState()) {
String userPath = HadoopUtils.getHdfsUserDir(user.getTenantCode(),id);
if (HadoopUtils.getInstance().exists(userPath)) {
HadoopUtils.getInstance().delete(userPath, true);
}
}
}
userMapper.deleteById(id);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant project
*
* @param loginUser login user
* @param userId user id
* @param projectIds project id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantProject(User loginUser, int userId, String projectIds) {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
//check exist
User tempUser = userMapper.selectById(userId);
if (tempUser == null) {
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
//if the selected projectIds are empty, delete all items associated with the user
projectUserMapper.deleteProjectRelation(0, userId);
if (check(result, StringUtils.isEmpty(projectIds), Status.SUCCESS)) {
return result;
}
String[] projectIdArr = projectIds.split(",");
for (String projectId : projectIdArr) {
Date now = new Date();
ProjectUser projectUser = new ProjectUser();
projectUser.setUserId(userId);
projectUser.setProjectId(Integer.parseInt(projectId));
projectUser.setPerm(7);
projectUser.setCreateTime(now);
projectUser.setUpdateTime(now);
projectUserMapper.insert(projectUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant resource
*
* @param loginUser login user
* @param userId user id
* @param resourceIds resource id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantResources(User loginUser, int userId, String resourceIds) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if(user == null){
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
        Set<Integer> needAuthorizeResIds = new HashSet<>();
if (StringUtils.isNotBlank(resourceIds)) {
String[] resourceFullIdArr = resourceIds.split(",");
// need authorize resource id set
for (String resourceFullId : resourceFullIdArr) {
String[] resourceIdArr = resourceFullId.split("-");
for (int i=0;i<=resourceIdArr.length-1;i++) {
int resourceIdValue = Integer.parseInt(resourceIdArr[i]);
needAuthorizeResIds.add(resourceIdValue);
}
}
}
//get the authorized resource id list by user id
List<Resource> oldAuthorizedRes = resourceMapper.queryAuthorizedResourceList(userId);
        // if resource type is UDF, need to check whether it is bound by a UDF function
        Set<Integer> oldAuthorizedResIds = oldAuthorizedRes.stream().map(Resource::getId).collect(Collectors.toSet());
//get the unauthorized resource id list
oldAuthorizedResIds.removeAll(needAuthorizeResIds);
if (CollectionUtils.isNotEmpty(oldAuthorizedResIds)) {
            // get all resource ids of process definitions that are released
List<Map<String, Object>> list = processDefinitionMapper.listResourcesByUser(userId);
Map<Integer, Set<Integer>> resourceProcessMap = ResourceProcessDefinitionUtils.getResourceProcessDefinitionMap(list);
Set<Integer> resourceIdSet = resourceProcessMap.keySet();
resourceIdSet.retainAll(oldAuthorizedResIds);
if (CollectionUtils.isNotEmpty(resourceIdSet)) {
logger.error("can't be deleted,because it is used of process definition");
for (Integer resId : resourceIdSet) {
logger.error("resource id:{} is used of process definition {}",resId,resourceProcessMap.get(resId));
}
putMsg(result, Status.RESOURCE_IS_USED);
return result;
}
}
resourcesUserMapper.deleteResourceUser(userId, 0);
if (check(result, StringUtils.isEmpty(resourceIds), Status.SUCCESS)) {
return result;
}
for (int resourceIdValue : needAuthorizeResIds) {
Resource resource = resourceMapper.selectById(resourceIdValue);
if (resource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
Date now = new Date();
ResourcesUser resourcesUser = new ResourcesUser();
resourcesUser.setUserId(userId);
resourcesUser.setResourcesId(resourceIdValue);
if (resource.isDirectory()) {
resourcesUser.setPerm(Constants.AUTHORIZE_READABLE_PERM);
}else{
resourcesUser.setPerm(Constants.AUTHORIZE_WRITABLE_PERM);
}
resourcesUser.setCreateTime(now);
resourcesUser.setUpdateTime(now);
resourcesUserMapper.insert(resourcesUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant udf function
*
* @param loginUser login user
* @param userId user id
* @param udfIds udf id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantUDFFunction(User loginUser, int userId, String udfIds) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if(user == null){
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
udfUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(udfIds), Status.SUCCESS)) {
return result;
}
String[] resourcesIdArr = udfIds.split(",");
for (String udfId : resourcesIdArr) {
Date now = new Date();
UDFUser udfUser = new UDFUser();
udfUser.setUserId(userId);
udfUser.setUdfId(Integer.parseInt(udfId));
udfUser.setPerm(7);
udfUser.setCreateTime(now);
udfUser.setUpdateTime(now);
udfUserMapper.insert(udfUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* grant datasource
*
* @param loginUser login user
* @param userId user id
* @param datasourceIds data source id array
* @return grant result code
*/
@Transactional(rollbackFor = Exception.class)
public Map<String, Object> grantDataSource(User loginUser, int userId, String datasourceIds) {
Map<String, Object> result = new HashMap<>(5);
result.put(Constants.STATUS, false);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
User user = userMapper.selectById(userId);
if(user == null){
putMsg(result, Status.USER_NOT_EXIST, userId);
return result;
}
datasourceUserMapper.deleteByUserId(userId);
if (check(result, StringUtils.isEmpty(datasourceIds), Status.SUCCESS)) {
return result;
}
String[] datasourceIdArr = datasourceIds.split(",");
for (String datasourceId : datasourceIdArr) {
Date now = new Date();
DatasourceUser datasourceUser = new DatasourceUser();
datasourceUser.setUserId(userId);
datasourceUser.setDatasourceId(Integer.parseInt(datasourceId));
datasourceUser.setPerm(7);
datasourceUser.setCreateTime(now);
datasourceUser.setUpdateTime(now);
datasourceUserMapper.insert(datasourceUser);
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user info
*
* @param loginUser login user
* @return user info
*/
public Map<String, Object> getUserInfo(User loginUser) {
Map<String, Object> result = new HashMap<>();
User user = null;
if (loginUser.getUserType() == UserType.ADMIN_USER) {
user = loginUser;
} else {
user = userMapper.queryDetailsById(loginUser.getId());
List<AlertGroup> alertGroups = alertGroupMapper.queryByUserId(loginUser.getId());
StringBuilder sb = new StringBuilder();
if (alertGroups != null && alertGroups.size() > 0) {
for (int i = 0; i < alertGroups.size() - 1; i++) {
sb.append(alertGroups.get(i).getGroupName() + ",");
}
// use the group name here as well, not the AlertGroup object's toString()
sb.append(alertGroups.get(alertGroups.size() - 1).getGroupName());
user.setAlertGroup(sb.toString());
}
}
result.put(Constants.DATA_LIST, user);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user list
*
* @param loginUser login user
* @return user list
*/
public Map<String, Object> queryAllGeneralUsers(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.queryAllGeneralUser();
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query user list
*
* @param loginUser login user
* @return user list
*/
public Map<String, Object> queryUserList(User loginUser) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.selectList(null);
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify user name exists
*
* @param userName user name
* @return true if user name not exists, otherwise return false
*/
public Result verifyUserName(String userName) {
Result result = new Result();
User user = userMapper.queryByUserNameAccurately(userName);
if (user != null) {
logger.error("user {} already exists, can't create again.", userName);
putMsg(result, Status.USER_NAME_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* unauthorized user
*
* @param loginUser login user
* @param alertgroupId alert group id
* @return unauthorize result code
*/
public Map<String, Object> unauthorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.selectList(null);
List<User> resultUsers = new ArrayList<>();
Set<User> userSet = null;
if (userList != null && userList.size() > 0) {
userSet = new HashSet<>(userList);
List<User> authedUserList = userMapper.queryUserListByAlertGroupId(alertgroupId);
Set<User> authedUserSet = null;
if (authedUserList != null && authedUserList.size() > 0) {
authedUserSet = new HashSet<>(authedUserList);
userSet.removeAll(authedUserSet);
}
resultUsers = new ArrayList<>(userSet);
}
result.put(Constants.DATA_LIST, resultUsers);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized user
*
* @param loginUser login user
* @param alertgroupId alert group id
* @return authorized result code
*/
public Map<String, Object> authorizedUser(User loginUser, Integer alertgroupId) {
Map<String, Object> result = new HashMap<>(5);
//only admin can operate
if (check(result, !isAdmin(loginUser), Status.USER_NO_OPERATION_PERM)) {
return result;
}
List<User> userList = userMapper.queryUserListByAlertGroupId(alertgroupId);
result.put(Constants.DATA_LIST, userList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check the given condition and, when it holds, put the given status into the result
*
* @param result result map
* @param bool condition to check
* @param userNoOperationPerm status to set when the condition is true
* @return true if the condition holds, otherwise false
*/
private boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
//only admin can operate
if (bool) {
result.put(Constants.STATUS, userNoOperationPerm);
result.put(Constants.MSG, userNoOperationPerm.getMsg());
return true;
}
return false;
}
/**
* @param tenantId tenant id
* @return true if tenant exists, otherwise return false
*/
private boolean checkTenantExists(int tenantId) {
return tenantMapper.queryById(tenantId) != null;
}
/**
* check user params
*
* @param userName user name
* @param password user password
* @param email user email
* @param phone user phone
* @return the first field that fails the check, otherwise null
*/
private String checkUserParams(String userName, String password, String email, String phone) {
String msg = null;
if (!CheckUtils.checkUserName(userName)) {
msg = userName;
} else if (!CheckUtils.checkPassword(password)) {
msg = password;
} else if (!CheckUtils.checkEmail(email)) {
msg = email;
} else if (!CheckUtils.checkPhone(phone)) {
msg = phone;
}
return msg;
}
/**
* copy resource files
* @param resourceComponent resource component
* @param srcBasePath src base path
* @param dstBasePath dst base path
* @throws IOException io exception
*/
private void copyResourceFiles(ResourceComponent resourceComponent, String srcBasePath, String dstBasePath) throws IOException {
List<ResourceComponent> components = resourceComponent.getChildren();
if (CollectionUtils.isNotEmpty(components)) {
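// walk the children: copy plain files, create empty directories, and recurse into non-empty directories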
for (ResourceComponent component:components) {
// verify whether the source file exists
if (!HadoopUtils.getInstance().exists(String.format("%s/%s",srcBasePath,component.getFullName()))){
logger.error("resource file: {} does not exist, copy error", component.getFullName());
throw new ServiceException(Status.RESOURCE_NOT_EXIST);
}
if (!component.isDirctory()) {
// copy it to dst
HadoopUtils.getInstance().copy(String.format("%s/%s", srcBasePath, component.getFullName()), String.format("%s/%s", dstBasePath, component.getFullName()), false, true);
continue;
}
if (CollectionUtils.isEmpty(component.getChildren())) {
// if the directory does not exist, create it
if (!HadoopUtils.getInstance().exists(String.format("%s/%s",dstBasePath,component.getFullName()))) {
HadoopUtils.getInstance().mkdir(String.format("%s/%s",dstBasePath,component.getFullName()));
}
} else {
copyResourceFiles(component,srcBasePath,dstBasePath);
}
}
}
}
}
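The revocation arithmetic in `grantResources` above is easy to misread. Here is a minimal, self-contained sketch of the same set-difference step; the class and variable names are illustrative, not part of the service:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class GrantDiffSketch {
    public static void main(String[] args) {
        // ids the user is currently authorized for
        Set<Integer> oldAuthorized = new HashSet<>(Arrays.asList(1, 2, 3));
        // ids requested by the new grant
        Set<Integer> needAuthorize = new HashSet<>(Arrays.asList(2, 3, 4));

        // removeAll leaves exactly the ids to revoke: old minus new
        oldAuthorized.removeAll(needAuthorize);
        System.out.println(oldAuthorized); // prints [1]
    }
}
```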
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,910 | [BUG] master server will show exception for some time when master server restart, it's just dispatching task | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
[ERROR] 2020-06-06 15:46:22.683 org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumer:[152] - dispatch error
org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException: fail to execute : Command [type=TASK_EXECUTE_REQUEST, opaque=1, bodyLen=1619] due to no suitable worker , current task need to default worker group execute
at org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher.dispatch(ExecutorDispatcher.java:87)
at org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumer.dispatch(TaskPriorityQueueConsumer.java:149)
at org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumer.run(TaskPriorityQueueConsumer.java:118)
**To Reproduce**
1. worker server runs normally, click `start workflow`,
2. then restart the master server; the master server runtime log shows the above exception for some time (about 30 seconds), then the master works normally again
**Which version of Dolphin Scheduler:**
-[ dev-1.3.0 ]
| https://github.com/apache/dolphinscheduler/issues/2910 | https://github.com/apache/dolphinscheduler/pull/2913 | 0de8e589651396925f66549b75e57b9673669291 | 5c9b080ab85ffc458557ebcbb5e8bf30a7c40ed0 | "2020-06-06T07:57:47Z" | java | "2020-06-06T10:24:26Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/dispatch/host/LowerWeightHostManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.dispatch.host;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext;
import org.apache.dolphinscheduler.server.master.dispatch.host.assign.HostWeight;
import org.apache.dolphinscheduler.server.master.dispatch.host.assign.LowerWeightRoundRobin;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.dolphinscheduler.common.Constants.COMMA;
/**
* lower weight host manager
*/
public class LowerWeightHostManager extends CommonHostManager {
private final Logger logger = LoggerFactory.getLogger(LowerWeightHostManager.class);
/**
* zookeeper registry center
*/
@Autowired
private ZookeeperRegistryCenter registryCenter;
/**
* round robin host manager
*/
private RoundRobinHostManager roundRobinHostManager;
/**
* selector
*/
private LowerWeightRoundRobin selector;
/**
* worker host weights
*/
private ConcurrentHashMap<String, Set<HostWeight>> workerHostWeightsMap;
/**
* worker group host lock
*/
private Lock lock;
/**
* executor service
*/
private ScheduledExecutorService executorService;
@PostConstruct
public void init(){
this.selector = new LowerWeightRoundRobin();
this.workerHostWeightsMap = new ConcurrentHashMap<>();
this.lock = new ReentrantLock();
this.executorService = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("LowerWeightHostManagerExecutor"));
this.executorService.scheduleWithFixedDelay(new RefreshResourceTask(), 35, 40, TimeUnit.SECONDS);
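// note: the 35s initial delay means the weight cache stays empty right after startup,
// which matches the ~30s "no suitable worker" window described in the report above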
this.roundRobinHostManager = new RoundRobinHostManager();
this.roundRobinHostManager.setZookeeperNodeManager(getZookeeperNodeManager());
}
@PreDestroy
public void close(){
this.executorService.shutdownNow();
}
/**
* select host
* @param context context
* @return host
*/
@Override
public Host select(ExecutionContext context){
Set<HostWeight> workerHostWeights = getWorkerHostWeights(context.getWorkerGroup());
if(CollectionUtils.isNotEmpty(workerHostWeights)){
return selector.select(workerHostWeights).getHost();
}
return new Host();
}
@Override
public Host select(Collection<Host> nodes) {
throw new UnsupportedOperationException("not support");
}
private void syncWorkerHostWeight(Map<String, Set<HostWeight>> workerHostWeights){
lock.lock();
try {
workerHostWeightsMap.clear();
workerHostWeightsMap.putAll(workerHostWeights);
} finally {
lock.unlock();
}
}
private Set<HostWeight> getWorkerHostWeights(String workerGroup){
lock.lock();
try {
return workerHostWeightsMap.get(workerGroup);
} finally {
lock.unlock();
}
}
class RefreshResourceTask implements Runnable{
@Override
public void run() {
try {
Map<String, Set<String>> workerGroupNodes = zookeeperNodeManager.getWorkerGroupNodes();
Set<Map.Entry<String, Set<String>>> entries = workerGroupNodes.entrySet();
Map<String, Set<HostWeight>> workerHostWeights = new HashMap<>();
for(Map.Entry<String, Set<String>> entry : entries){
String workerGroup = entry.getKey();
Set<String> nodes = entry.getValue();
String workerGroupPath = registryCenter.getWorkerGroupPath(workerGroup);
Set<HostWeight> hostWeights = new HashSet<>(nodes.size());
for(String node : nodes){
String heartbeat = registryCenter.getZookeeperCachedOperator().get(workerGroupPath + "/" + node);
if(StringUtils.isNotEmpty(heartbeat)
&& heartbeat.split(COMMA).length == Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH){
String[] parts = heartbeat.split(COMMA);
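// heartbeat layout, as consumed below: [0]=cpu usage, [1]=memory usage,
// [2]=load average, [3]=available physical memory (G), [8]=node status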
int status = Integer.parseInt(parts[8]);
if (status == Constants.ABNORMAL_NODE_STATUS){
logger.warn("worker load is too high or availablePhysicalMemorySize(G) is too low, availablePhysicalMemorySize(G):{}, loadAvg:{}",
Double.parseDouble(parts[3]), Double.parseDouble(parts[2]));
continue;
}
double cpu = Double.parseDouble(parts[0]);
double memory = Double.parseDouble(parts[1]);
double loadAverage = Double.parseDouble(parts[2]);
HostWeight hostWeight = new HostWeight(Host.of(node), cpu, memory, loadAverage);
hostWeights.add(hostWeight);
}
}
workerHostWeights.put(workerGroup, hostWeights);
}
syncWorkerHostWeight(workerHostWeights);
} catch (Throwable ex){
logger.error("RefreshResourceTask error", ex);
}
}
}
}
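Given the report above, the 35-second initial delay in `init()` looks like the cause of the startup window: until the first `RefreshResourceTask` run, `workerHostWeightsMap` is empty, `select()` returns an empty `Host`, and dispatch fails with "no suitable worker". A minimal sketch of the kind of change the fix suggests, running the first refresh immediately; the concrete delay values here are illustrative assumptions, not the merged fix:

```java
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class RefreshDelaySketch {
    public static void main(String[] args) throws InterruptedException {
        ScheduledExecutorService pool = Executors.newSingleThreadScheduledExecutor();
        // initial delay 0: the first refresh runs right away, so the weight cache
        // is populated as soon as the master starts instead of 35 seconds later
        pool.scheduleWithFixedDelay(
                () -> System.out.println("refresh worker weights"),
                0, 40, TimeUnit.SECONDS);
        Thread.sleep(100);
        pool.shutdownNow();
    }
}
```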
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,910 | [BUG] master server will show exception for some time when master server restart, it's just dispatching task | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
[ERROR] 2020-06-06 15:46:22.683 org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumer:[152] - dispatch error
org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException: fail to execute : Command [type=TASK_EXECUTE_REQUEST, opaque=1, bodyLen=1619] due to no suitable worker , current task need to default worker group execute
at org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher.dispatch(ExecutorDispatcher.java:87)
at org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumer.dispatch(TaskPriorityQueueConsumer.java:149)
at org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumer.run(TaskPriorityQueueConsumer.java:118)
**To Reproduce**
1. worker server runs normally, click `start workflow`,
2. then restart the master server; the master server runtime log shows the above exception for some time (about 30 seconds), then the master works normally again
**Which version of Dolphin Scheduler:**
-[ dev-1.3.0 ]
| https://github.com/apache/dolphinscheduler/issues/2910 | https://github.com/apache/dolphinscheduler/pull/2913 | 0de8e589651396925f66549b75e57b9673669291 | 5c9b080ab85ffc458557ebcbb5e8bf30a7c40ed0 | "2020-06-06T07:57:47Z" | java | "2020-06-06T10:24:26Z" | sql/h2.mv.db | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,715 | compile error on UsersServiceTest |
**Describe the bug**
compile error on UsersServiceTest.java
**To Reproduce**
mvn package
**Expected behavior**
remove unused import
**Screenshots**
![image](https://user-images.githubusercontent.com/11856424/81902918-80e05b00-95f3-11ea-854b-f16cf271177b.png)
**Which version of Dolphin Scheduler:**
-[dev-1.3.0]
-[dev]
| https://github.com/apache/dolphinscheduler/issues/2715 | https://github.com/apache/dolphinscheduler/pull/2718 | 11c4c583286d913c17eb8efff029d93d89f98b25 | 9fd728908fd41b8aecc9565687060f96f413936a | "2020-05-14T07:15:13Z" | java | "2020-06-14T02:27:13Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UsersServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.EncryptionUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class UsersServiceTest {
private static final Logger logger = LoggerFactory.getLogger(UsersServiceTest.class);
@InjectMocks
private UsersService usersService;
@Mock
private UserMapper userMapper;
@Mock
private TenantMapper tenantMapper;
@Mock
private ProjectUserMapper projectUserMapper;
@Mock
private ResourceUserMapper resourcesUserMapper;
@Mock
private UDFUserMapper udfUserMapper;
@Mock
private DataSourceUserMapper datasourceUserMapper;
@Mock
private AlertGroupMapper alertGroupMapper;
@Mock
private ResourceMapper resourceMapper;
private String queueName = "UsersServiceTestQueue";
@Before
public void before(){
}
@After
public void after(){
}
@Test
public void testCreateUser(){
User user = new User();
user.setUserType(UserType.ADMIN_USER);
String userName = "userTest0001~";
String userPassword = "userTest";
String email = "123@qq.com";
int tenantId = Integer.MAX_VALUE;
String phone= "13456432345";
int state = 1;
try {
//userName error
Map<String, Object> result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userName = "userTest0001";
userPassword = "userTest000111111111111111";
//password error
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
userPassword = "userTest0001";
email = "1q.com";
//email error
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
email = "122222@qq.com";
phone ="2233";
//phone error
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, result.get(Constants.STATUS));
phone = "13456432345";
//tenantId not exists
result = usersService.createUser(user, userName, userPassword, email, tenantId, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.TENANT_NOT_EXIST, result.get(Constants.STATUS));
//success
Mockito.when(tenantMapper.queryById(1)).thenReturn(getTenant());
result = usersService.createUser(user, userName, userPassword, email, 1, phone, queueName, state);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error(Status.CREATE_USER_ERROR.getMsg(),e);
Assert.assertTrue(false);
}
}
@Test
public void testQueryUser(){
String userName = "userTest0001";
String userPassword = "userTest0001";
when(userMapper.queryUserByNamePassword(userName,EncryptionUtils.getMd5(userPassword))).thenReturn(getGeneralUser());
User queryUser = usersService.queryUser(userName, userPassword);
logger.info(queryUser.toString());
Assert.assertTrue(queryUser!=null);
}
@Test
public void testGetUserIdByName() {
User user = new User();
user.setId(1);
user.setUserType(UserType.ADMIN_USER);
user.setUserName("test_user");
//user name null
int userId = usersService.getUserIdByName("");
Assert.assertEquals(0, userId);
//user not exist
when(usersService.queryUser(user.getUserName())).thenReturn(null);
int userNotExistId = usersService.getUserIdByName(user.getUserName());
Assert.assertEquals(-1, userNotExistId);
//user exist
when(usersService.queryUser(user.getUserName())).thenReturn(user);
int userExistId = usersService.getUserIdByName(user.getUserName());
Assert.assertEquals(user.getId(), userExistId);
}
@Test
public void testQueryUserList(){
User user = new User();
//no operate
Map<String, Object> result = usersService.queryUserList(user);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
user.setUserType(UserType.ADMIN_USER);
when(userMapper.selectList(null )).thenReturn(getUserList());
result = usersService.queryUserList(user);
List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(userList.size()>0);
}
@Test
public void testQueryUserListPage(){
User user = new User();
IPage<User> page = new Page<>(1,10);
page.setRecords(getUserList());
when(userMapper.queryUserPaging(any(Page.class), eq("userTest"))).thenReturn(page);
//no operate
Map<String, Object> result = usersService.queryUserList(user,"userTest",1,10);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
user.setUserType(UserType.ADMIN_USER);
result = usersService.queryUserList(user,"userTest",1,10);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
PageInfo<User> pageInfo = (PageInfo<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(pageInfo.getLists().size()>0);
}
@Test
public void testUpdateUser(){
String userName = "userTest0001";
String userPassword = "userTest0001";
try {
//user not exist
Map<String, Object> result = usersService.updateUser(0,userName,userPassword,"3443@qq.com",1,"13457864543","queue", 1);
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
logger.info(result.toString());
//success
when(userMapper.selectById(1)).thenReturn(getUser());
result = usersService.updateUser(1,userName,userPassword,"32222s@qq.com",1,"13457864543","queue", 1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("update user error",e);
Assert.assertTrue(false);
}
}
@Test
public void testDeleteUserById(){
User loginUser = new User();
try {
when(userMapper.queryTenantCodeByUserId(1)).thenReturn(getUser());
when(userMapper.selectById(1)).thenReturn(getUser());
//no operate
Map<String, Object> result = usersService.deleteUserById(loginUser,3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
// user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.deleteUserById(loginUser,3);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
result = usersService.deleteUserById(loginUser,1);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
} catch (Exception e) {
logger.error("delete user error",e);
Assert.assertTrue(false);
}
}
@Test
public void testGrantProject(){
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
String projectIds= "100000,120000";
Map<String, Object> result = usersService.grantProject(loginUser, 1, projectIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantProject(loginUser, 2, projectIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
result = usersService.grantProject(loginUser, 1, projectIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantResources(){
String resourceIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
Map<String, Object> result = usersService.grantResources(loginUser, 1, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantResources(loginUser, 2, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
when(resourceMapper.queryAuthorizedResourceList(1)).thenReturn(new ArrayList<Resource>());
when(resourceMapper.selectById(Mockito.anyInt())).thenReturn(getResource());
result = usersService.grantResources(loginUser, 1, resourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantUDFFunction(){
String udfIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
Map<String, Object> result = usersService.grantUDFFunction(loginUser, 1, udfIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantUDFFunction(loginUser, 2, udfIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
result = usersService.grantUDFFunction(loginUser, 1, udfIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testGrantDataSource(){
String datasourceIds = "100000,120000";
when(userMapper.selectById(1)).thenReturn(getUser());
User loginUser = new User();
Map<String, Object> result = usersService.grantDataSource(loginUser, 1, datasourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//user not exist
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.grantDataSource(loginUser, 2, datasourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NOT_EXIST, result.get(Constants.STATUS));
//success
result = usersService.grantDataSource(loginUser, 1, datasourceIds);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void getUserInfo(){
User loginUser = new User();
loginUser.setUserName("admin");
loginUser.setUserType(UserType.ADMIN_USER);
// get admin user
Map<String, Object> result = usersService.getUserInfo(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
User tempUser = (User) result.get(Constants.DATA_LIST);
//check userName
Assert.assertEquals("admin",tempUser.getUserName());
//get general user
loginUser.setUserType(null);
loginUser.setId(1);
when(userMapper.queryDetailsById(1)).thenReturn(getGeneralUser());
result = usersService.getUserInfo(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
tempUser = (User) result.get(Constants.DATA_LIST);
//check userName
Assert.assertEquals("userTest0001",tempUser.getUserName());
}
@Test
public void testQueryAllGeneralUsers(){
User loginUser = new User();
//no operate
Map<String, Object> result = usersService.queryAllGeneralUsers(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
when(userMapper.queryAllGeneralUser()).thenReturn(getUserList());
result = usersService.queryAllGeneralUsers(loginUser);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
Assert.assertTrue(CollectionUtils.isNotEmpty(userList));
}
@Test
public void testVerifyUserName(){
//not exist user
Result result = usersService.verifyUserName("admin89899");
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS.getMsg(), result.getMsg());
//exist user
when(userMapper.queryByUserNameAccurately("userTest0001")).thenReturn(getUser());
result = usersService.verifyUserName("userTest0001");
logger.info(result.toString());
Assert.assertEquals(Status.USER_NAME_EXIST.getMsg(), result.getMsg());
}
@Test
public void testUnauthorizedUser(){
User loginUser = new User();
when(userMapper.selectList(null )).thenReturn(getUserList());
when( userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());
//no operate
Map<String, Object> result = usersService.unauthorizedUser(loginUser, 2);
logger.info(result.toString());
loginUser.setUserType(UserType.ADMIN_USER);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
result = usersService.unauthorizedUser(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testAuthorizedUser(){
User loginUser = new User();
when(userMapper.queryUserListByAlertGroupId(2)).thenReturn(getUserList());
//no operate
Map<String, Object> result = usersService.authorizedUser(loginUser, 2);
logger.info(result.toString());
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, result.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
result = usersService.authorizedUser(loginUser, 2);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
List<User> userList = (List<User>) result.get(Constants.DATA_LIST);
logger.info(result.toString());
Assert.assertTrue(CollectionUtils.isNotEmpty(userList));
}
/**
* get user
* @return
*/
private User getGeneralUser(){
User user = new User();
user.setUserType(UserType.GENERAL_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
return user;
}
private List<User> getUserList(){
List<User> userList = new ArrayList<>();
userList.add(getGeneralUser());
return userList;
}
/**
* get user
*/
private User getUser(){
User user = new User();
user.setUserType(UserType.ADMIN_USER);
user.setUserName("userTest0001");
user.setUserPassword("userTest0001");
user.setState(1);
return user;
}
/**
* get tenant
* @return tenant
*/
private Tenant getTenant(){
Tenant tenant = new Tenant();
tenant.setId(1);
return tenant;
}
/**
* get resource
* @return resource
*/
private Resource getResource(){
Resource resource = new Resource();
resource.setPid(-1);
resource.setUserId(1);
resource.setDescription("ResourcesServiceTest.jar");
resource.setAlias("ResourcesServiceTest.jar");
resource.setFullName("/ResourcesServiceTest.jar");
resource.setType(ResourceType.FILE);
return resource;
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,018 | [BUG] i18n incomplete | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
In User page, the user type is not in i18n;
**To Reproduce**
Steps to reproduce the behavior, for example:
1. In file '\dolphinscheduler-ui\src\js\conf\home\pages\security\pages\users\_source\list.vue'
2. In line 29
3. the '用户类型' should be '{{$t('User Type')}}'
4. and the i18n file should be update
**Expected behavior**
modify the page without i18n
**Screenshots**
None
**Which version of Dolphin Scheduler:**
-[1.3.0]
**Additional context**
None
**Requirement or improvement
None
| https://github.com/apache/dolphinscheduler/issues/3018 | https://github.com/apache/dolphinscheduler/pull/3022 | 4aa1826718a75d9fc7ac948d871732a09abc8b8d | 6e0b464531d322c2ff60470d3ed63d025d8c970f | "2020-06-19T03:43:13Z" | java | "2020-06-20T14:22:55Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model user-list-model">
<div class="table-box">
<table>
<tr>
<th>
<span>{{$t('#')}}</span>
</th>
<th>
<span>{{$t('User Name')}}</span>
</th>
<th>
<span>{{$t('User Type')}}</span>
</th>
<th>
<span>{{$t('Tenant')}}</span>
</th>
<th>
<span>{{$t('Queue')}}</span>
</th>
<th>
<span>{{$t('Email')}}</span>
</th>
<th>
<span>{{$t('Phone')}}</span>
</th>
<th id="state">
<span>{{$t('State')}}</span>
</th>
<th>
<span>{{$t('Create Time')}}</span>
</th>
<th>
<span>{{$t('Update Time')}}</span>
</th>
<th width="120">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
<span>{{parseInt(pageNo === 1 ? ($index + 1) : (($index + 1) + (pageSize * (pageNo - 1))))}}</span>
</td>
<td>
<span>
{{item.userName || '-'}}
</span>
</td>
<td>
<span>{{item.userType === 'GENERAL_USER' ? `${$t('Ordinary users')}` : `${$t('Administrator')}`}}</span>
</td>
<td><span>{{item.tenantName || '-'}}</span></td>
<td><span>{{item.queue || '-'}}</span></td>
<td>
<span>{{item.email || '-'}}</span>
</td>
<td>
<span>{{item.phone || '-'}}</span>
</td>
<td>
<span v-if="item.state == 1">{{$t('Enable')}}</span>
<span v-else>{{$t('Disable')}}</span>
</td>
<td>
<span v-if="item.createTime">{{item.createTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-poptip
:ref="'poptip-auth-' + $index"
popper-class="user-list-poptip"
placement="bottom-end">
<div class="auth-select-box">
<a href="javascript:" @click="_authProject(item,$index)">{{$t('Project')}}</a>
<a href="javascript:" @click="_authFile(item,$index)">{{$t('Resources')}}</a>
<a href="javascript:" @click="_authDataSource(item,$index)">{{$t('Datasource')}}</a>
<a href="javascript:" @click="_authUdfFunc(item,$index)">{{$t('UDF Function')}}</a>
</div>
<template slot="reference">
<x-button type="warning" shape="circle" size="xsmall" data-toggle="tooltip" :title="$t('Authorize')" icon="ans-icon-user-empty" :disabled="item.userType === 'ADMIN_USER'"></x-button>
</template>
</x-poptip>
<x-button type="info" shape="circle" size="xsmall" data-toggle="tooltip" icon="ans-icon-edit" :title="$t('Edit')" @click="_edit(item)">
</x-button>
<x-poptip
:ref="'poptip-delete-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')"
:disabled="item.userType === 'ADMIN_USER'"
icon="ans-icon-trash">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import { mapActions } from 'vuex'
import mTransfer from '@/module/components/transfer/transfer'
import mResource from '@/module/components/transfer/resource'
export default {
name: 'user-list',
data () {
return {
list: []
}
},
props: {
userList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('security', ['deleteUser', 'getAuthList', 'grantAuthorization','getResourceList']),
_closeDelete (i) {
this.$refs[`poptip-delete-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteUser({
id: item.id
}).then(res => {
this.$refs[`poptip-delete-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-delete-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_edit (item) {
this.$emit('on-edit', item)
},
_authProject (item, i) {
this.$refs[`poptip-auth-${i}`][0].doClose()
this.getAuthList({
id: item.id,
type: 'project',
category: 'projects'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.name
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.name
}
})
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mTransfer, {
on: {
onUpdate (projectIds) {
self._grantAuthorization('users/grant-project', {
userId: item.id,
projectIds: projectIds
})
modal.remove()
},
close () {
modal.remove()
}
},
props: {
sourceListPrs: sourceListPrs,
targetListPrs: targetListPrs,
type: {
name: `${i18n.$t('Project')}`
}
}
})
}
})
})
},
/*
getAllLeaf
*/
getAllLeaf (data) {
let result = []
let getLeaf = (data)=> {
data.forEach(item => {
if (item.children.length==0) {
result.push(item)
} else {
getLeaf(item.children)
}
})
}
getLeaf(data)
return result
},
_authFile (item, i) {
this.$refs[`poptip-auth-${i}`][0].doClose()
this.getResourceList({
id: item.id,
type: 'file',
category: 'resources'
}).then(data => {
// let sourceListPrs = _.map(data[0], v => {
// return {
// id: v.id,
// name: v.alias,
// type: v.type
// }
// })
let fileSourceList = []
let udfSourceList = []
data[0].forEach((value,index,array)=>{
if(value.type =='FILE'){
fileSourceList.push(value)
} else{
udfSourceList.push(value)
}
})
let fileTargetList = []
let udfTargetList = []
let pathId = []
data[1].forEach(v=>{
let arr = []
arr[0] = v
if(this.getAllLeaf(arr).length>0) {
pathId.push(this.getAllLeaf(arr)[0])
}
})
data[1].forEach((value,index,array)=>{
if(value.type =='FILE'){
fileTargetList.push(value)
} else{
udfTargetList.push(value)
}
})
fileTargetList = _.map(fileTargetList, v => {
return v.id
})
udfTargetList = _.map(udfTargetList, v => {
return v.id
})
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mResource, {
on: {
onUpdate (resourceIds) {
self._grantAuthorization('users/grant-file', {
userId: item.id,
resourceIds: resourceIds
})
modal.remove()
},
close () {
modal.remove()
}
},
props: {
// sourceListPrs: sourceListPrs,
// targetListPrs: targetListPrs,
fileSourceList: fileSourceList,
udfSourceList: udfSourceList,
fileTargetList: fileTargetList,
udfTargetList: udfTargetList,
type: {
name: `${i18n.$t('Resources')}`
}
}
})
}
})
})
},
_authDataSource (item, i) {
this.$refs[`poptip-auth-${i}`][0].doClose()
this.getAuthList({
id: item.id,
type: 'datasource',
category: 'datasources'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.name
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.name
}
})
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mTransfer, {
on: {
onUpdate (datasourceIds) {
self._grantAuthorization('users/grant-datasource', {
userId: item.id,
datasourceIds: datasourceIds
})
modal.remove()
},
close () {
modal.remove()
}
},
props: {
sourceListPrs: sourceListPrs,
targetListPrs: targetListPrs,
type: {
name: `${i18n.$t('Datasource')}`
}
}
})
}
})
})
},
_authUdfFunc (item, i) {
this.$refs[`poptip-auth-${i}`][0].doClose()
this.getAuthList({
id: item.id,
type: 'udf-func',
category: 'resources'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.funcName
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.funcName
}
})
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mTransfer, {
on: {
onUpdate (udfIds) {
self._grantAuthorization('users/grant-udf-func', {
userId: item.id,
udfIds: udfIds
})
modal.remove()
},
close () {
modal.remove()
}
},
props: {
sourceListPrs: sourceListPrs,
targetListPrs: targetListPrs,
type: {
name: 'UDF Function'
}
}
})
}
})
})
},
_grantAuthorization (api, param) {
this.grantAuthorization({
api: api,
param: param
}).then(res => {
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
watch: {
userList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.userList
},
mounted () {
},
components: { }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.user-list-model {
.user-list-poptip {
min-width: 90px !important;
.auth-select-box {
a {
font-size: 14px;
height: 28px;
line-height: 28px;
display: block;
}
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,018 | [BUG] i18n incomplete | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
In User page, the user type is not in i18n;
**To Reproduce**
Steps to reproduce the behavior, for example:
1. In file '\dolphinscheduler-ui\src\js\conf\home\pages\security\pages\users\_source\list.vue'
2. In line 29
3. the '用户类型' should be '{{$t('User Type')}}'
4. and the i18n file should be update
**Expected behavior**
modify the page without i18n
**Screenshots**
None
**Which version of Dolphin Scheduler:**
-[1.3.0]
**Additional context**
None
**Requirement or improvement
None
| https://github.com/apache/dolphinscheduler/issues/3018 | https://github.com/apache/dolphinscheduler/pull/3022 | 4aa1826718a75d9fc7ac948d871732a09abc8b8d | 6e0b464531d322c2ff60470d3ed63d025d8c970f | "2020-06-19T03:43:13Z" | java | "2020-06-20T14:22:55Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'User Type': 'User Type',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Please enter name(required)': 'Please enter name(required)',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Please enter name (required)': 'Please enter name (required)',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Project Home',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
success: 'success',
failed: 'failed',
delete: 'delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main class': 'Main class',
'Main jar package': 'Main jar package',
'Please enter main jar package': 'Please enter main jar package',
'Command-line parameters': 'Command-line parameters',
'Please enter Command-line parameters': 'Please enter Command-line parameters',
'Other parameters': 'Other parameters',
'Please enter other parameters': 'Please enter other parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter method(optional)': 'Please enter method(optional)',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Flink Version':'Flink Version',
'Driver core number': 'Driver core number',
'Please enter driver core number': 'Please enter driver core number',
'Driver memory use': 'Driver memory use',
'Please enter driver memory use': 'Please enter driver memory use',
'Number of Executors': 'Number of Executors',
'Please enter the number of Executor': 'Please enter the number of Executor',
'Executor memory': 'Executor memory',
'Please enter the Executor memory': 'Please enter the Executor memory',
'Executor core number': 'Executor core number',
'Please enter Executor core number': 'Please enter Executor core number',
'The number of Executors should be a positive integer': 'The number of Executors should be a positive integer',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Please enter ExecutorPlease enter Executor core number': 'Please enter ExecutorPlease enter Executor core number',
'Core number should be positive integer': 'Core number should be positive integer',
'SQL Type': 'SQL Type',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Recipient required': 'Recipient required',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'Tenant Code': 'Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Queue',
'Please select a queue': 'default is tenant association queue',
'Please enter the tenant code in English': 'Please enter the tenant code in English',
'Please enter tenant code in English': 'Please enter tenant code in English',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'Please enter phone number': 'Please enter phone number',
'Please enter main class': 'Please enter main class',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'Please select UDF resources directory': 'Please select UDF resources directory',
'UDF resources directory': 'UDF resources directory',
'Upload File Size': 'Upload File size cannot exceed 1g',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Group Name': 'Group Name',
'Please enter group name': 'Please enter group name',
'Group Type': 'Group Type',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
Date: 'Date',
waiting: 'waiting',
execution: 'execution',
finish: 'finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
Name: 'Name',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
slot: 'slot',
taskManager: 'taskManager',
jobManagerMemory: 'jobManagerMemory',
taskManagerMemory: 'taskManagerMemory',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
kill: 'kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
Delete: 'Delete',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
Recipient: 'Recipient',
Cc: 'Cc',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
'Please enter method': 'Please enter method',
none: 'none',
name: 'name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
'Please enter the IP address separated by commas': 'Please enter the IP address separated by commas',
'Note: Multiple IP addresses have been comma separated': 'Note: Multiple IP addresses should be comma separated',
'Failure time': 'Failure time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: 'Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Workflow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'number of commands waiting to run',
'failure command number': 'number of failed commands',
'tasks number of waiting running': 'number of tasks waiting to run',
'task number of ready to kill': 'number of tasks ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'IP address cannot be empty': 'IP address cannot be empty',
'Please enter the correct IP': 'Please enter the correct IP',
'Please generate token': 'Please generate token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys',
'Please enter Hive Partition Values': 'Please enter Hive Partition Values',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'Successful branch flow and failed branch flow are required',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
'Enable': 'Enable',
'Disable': 'Disable',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout':'Connect Timeout',
'Socket Timeout':'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout be a positive integer',
'Socket Timeout be a positive integer': 'Socket Timeout be a positive integer',
'ms':'ms'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,018 | [BUG] i18n incomplete | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
On the User page, the user type is not internationalized (i18n).
**To Reproduce**
Steps to reproduce the behavior, for example:
1. In file '\dolphinscheduler-ui\src\js\conf\home\pages\security\pages\users\_source\list.vue'
2. In line 29
3. the '用户类型' should be '{{$t('User Type')}}'
4. and the i18n file should be updated (see the sketch below)
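
A minimal sketch of the fix, assuming the key is named `'User Type'` (the key name and markup below are assumptions, not the merged patch):

```js
// list.vue, line 29: replace the hard-coded header with an i18n lookup:
//   <th>用户类型</th>  ->  <th>{{$t('User Type')}}</th>

// dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js: add the matching key
export default {
  // ...existing keys...
  'User Type': 'User Type'
}
// zh_CN.js would gain the same key with the value '用户类型'.
```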
**Expected behavior**
The page should be modified so that it no longer contains text without i18n.
**Screenshots**
None
**Which version of Dolphin Scheduler:**
-[1.3.0]
**Additional context**
None
**Requirement or improvement**
None
| https://github.com/apache/dolphinscheduler/issues/3018 | https://github.com/apache/dolphinscheduler/pull/3022 | 4aa1826718a75d9fc7ac948d871732a09abc8b8d | 6e0b464531d322c2ff60470d3ed63d025d8c970f | "2020-06-19T03:43:13Z" | java | "2020-06-20T14:22:55Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name(required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Please enter name (required)': '请输入名称(必填)',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '项目首页',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
success: '成功',
failed: '失败',
delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main class': '主函数的class',
'Main jar package': '主jar包',
'Please enter main jar package': '请选择主jar包',
'Command-line parameters': '命令行参数',
'Please enter Command-line parameters': '请输入命令行参数',
'Other parameters': '其他参数',
'Please enter other parameters': '请输入其他参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
'Please enter main class': '请填写主函数的class',
Datasource: '数据源',
methods: '方法',
'Please enter method(optional)': '请输入方法(选填)',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Flink Version': 'Flink版本',
'Driver core number': 'Driver内核数',
'Please enter driver core number': '请输入Driver内核数',
'Driver memory use': 'Driver内存数',
'Please enter driver memory use': '请输入Driver内存数',
'Number of Executors': 'Executor数量',
'Please enter the number of Executor': '请输入Executor数量',
'Executor memory': 'Executor内存数',
'Please enter the Executor memory': '请输入Executor内存数',
'Executor core number': 'Executor内核数',
'Please enter Executor core number': '请输入Executor内核数',
'The number of Executors should be a positive integer': 'Executor数量为正整数',
'Memory should be a positive integer': '内存数为数字',
'Please enter ExecutorPlease enter Executor core number': '请填写Executor内核数',
'Core number should be positive integer': '内核数为正整数',
'SQL Type': 'sql类型',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Recipient required': '收件人邮箱必填',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入资源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'Tenant Code': '租户编码',
'Tenant Name': '租户名称',
Queue: '队列',
'Please enter the tenant code in English': '请输入租户编码只允许英文',
'Please enter tenant code in English': '请输入英文租户编码',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Group Name': '组名称',
'Please enter group name': '请输入组名称',
'Group Type': '组类型',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': 'prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
Date: '日期',
waiting: '等待',
execution: '执行中',
finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
Name: '名称',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
kill: 'kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
Delete: '删除',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
Recipient: '收件人',
Cc: '抄送人',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
'Please enter method': '请输入方法',
none: '无',
name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': '队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
'Please enter the IP address separated by commas': '请输入IP地址多个用英文逗号隔开',
'Note: Multiple IP addresses have been comma separated': '注意:多个IP地址以英文逗号分割',
'Failure time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'Please select a queue': '默认为租户关联队列',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
slot: 'slot数量',
taskManager: 'taskManage数量',
jobManagerMemory: 'jobManager内存数',
taskManagerMemory: 'taskManager内存数',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'IP address cannot be empty': 'IP地址不能为空',
'Please enter the correct IP': '请输入正确的IP',
'Please generate token': '请生成Token',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Hadoop Custom Params': 'Hadoop参数',
'Sqoop Advanced Parameters': 'Sqoop参数',
'Sqoop Job Name': '任务名称',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
'Please enter Job Name(required)': '请输入任务名称(必填)',
'Please enter Custom Shell(required)': '请输入自定义脚本',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Custom Job': '自定义任务',
'Custom Script': '自定义脚本',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': '成功分支流转和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'Enable': '启用',
'Disable': '停用',
'Timeout Settings': '超时设置',
'Connect Timeout':'连接超时',
'Socket Timeout':'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
'ms':'毫秒'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,085 | [BUG] after zoom or drag the DAG view, can't place node correctly and it may disappear | **Describe the bug**
After zooming or dragging the DAG view, nodes can't be placed correctly and may disappear
**To Reproduce**
1. Go to the project detail DAG
2. Scroll the middle mouse button to zoom the nodes
3. Drag a new node into the containment (close to the sidebar)
4. See the error: the new node disappears, or it can't be placed where we want
**Expected behavior**
When we drag a new node into the containment, it should not disappear. We are doing the correct thing, but something wrong happens.
**Screenshots**
![no zoom](https://user-images.githubusercontent.com/40662353/86087052-d0acb000-bad5-11ea-827e-de9316d989a1.png)
![after zoom](https://user-images.githubusercontent.com/40662353/86087169-1a959600-bad6-11ea-858a-b9fc00cc74f0.png)
**Which version of Dolphin Scheduler:**
-[1.2.0 to latest version ]
**Additional context**
After zooming (or dragging the #canvas element), the canvas area and the outer area no longer fit together, causing the drop to fail.
I tried to fix this problem. There are two ways:
1. use jsPlumb's native method to support zoom.
2. modify the dragZoom.js code to change the zoom behavior.
I chose option 2, though I think this way leads to increased code complexity. Any suggestions? Should I create a PR?
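
For reference, a minimal sketch of option 1, assuming the DAG's jsPlumb instance is reachable from the zoom handler (jsPlumb documents a `setZoom` API; the wiring below is an assumption, not the merged fix):

```js
import d3 from 'd3'

// Keep d3 for the wheel gesture, but tell jsPlumb about the scale so its
// internal drag math stays consistent with the CSS transform on #canvas.
export function initZoom (instance) {
  const $canvas = $('#canvas') // jQuery is a global in this project
  d3.select('#canvas').call(
    d3.behavior.zoom()
      .scaleExtent([0.5, 2])
      .on('zoom', () => {
        const t = d3.event.translate
        const s = d3.event.scale
        $canvas.css('transform', 'translate(' + t[0] + 'px,' + t[1] + 'px) scale(' + s + ')')
        $canvas.css('transform-origin', '0 0')
        instance.setZoom(s) // compensate jsPlumb's drag calculations for the zoom
      })
  ).on('dblclick.zoom', null)
}
```

Note that the jQuery UI droppable used when dragging new nodes onto the canvas does its own offset math, so its drop coordinates would still need dividing by the current scale.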
| https://github.com/apache/dolphinscheduler/issues/3085 | https://github.com/apache/dolphinscheduler/pull/3103 | b0a7bb4d9c1fbeaa96b3aff8d2ab928a4c6c6c15 | 631cc6ea1fbc95e55597982f3c5a23d1f3751327 | "2020-06-30T05:34:19Z" | java | "2020-07-02T06:19:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/dragZoom.js | import d3 from 'd3'
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const DragZoom = function () {
this.element = {}
this.zoom = {}
this.scale = 1
}
DragZoom.prototype.init = function () {
const $canvas = $('#canvas')
this.element = d3.select('#canvas')
this.zoom = d3.behavior.zoom()
.scaleExtent([0.5, 2])
.on('zoom', () => {
this.scale = d3.event.scale
$canvas.css('transform', 'translate(' + d3.event.translate[0] + 'px,' + d3.event.translate[1] + 'px) scale(' + this.scale + ')')
$canvas.css('transform-origin', '0 0')
})
this.element.call(this.zoom).on('dblclick.zoom', null)
}
export default new DragZoom()
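// A hedged sketch for option 2 in #3085 (names below are assumptions): any drop
// handler that maps mouse coordinates into canvas space must undo the transform
// applied above, e.g. assuming DragZoom also records d3.event.translate:
//
//   const scale = DragZoom.scale
//   const left = (ui.offset.left - $canvas.offset().left) / scale
//   const top = (ui.offset.top - $canvas.offset().top) / scale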
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,085 | [BUG] after zoom or drag the DAG view, can't place node correctly and it may disappear | **Describe the bug**
After zooming or dragging the DAG view, nodes can't be placed correctly and may disappear
**To Reproduce**
1. Go to the project detail DAG
2. Scroll the middle mouse button to zoom the nodes
3. Drag a new node into the containment (close to the sidebar)
4. See the error: the new node disappears, or it can't be placed where we want
**Expected behavior**
When we drag a new node into the containment, it should not disappear. We are doing the correct thing, but something wrong happens.
**Screenshots**
![no zoom](https://user-images.githubusercontent.com/40662353/86087052-d0acb000-bad5-11ea-827e-de9316d989a1.png)
![after zoom](https://user-images.githubusercontent.com/40662353/86087169-1a959600-bad6-11ea-858a-b9fc00cc74f0.png)
**Which version of Dolphin Scheduler:**
-[1.2.0 to latest version ]
**Additional context**
After zooming (or dragging the #canvas element), the canvas area and the outer area no longer fit together, causing the drop to fail.
I tried to fix this problem. There are two ways:
1. use jsPlumb's native method to support zoom.
2. modify the dragZoom.js code to change the zoom behavior.
I chose option 2, though I think this way leads to increased code complexity. Any suggestions? Should I create a PR?
| https://github.com/apache/dolphinscheduler/issues/3085 | https://github.com/apache/dolphinscheduler/pull/3103 | b0a7bb4d9c1fbeaa96b3aff8d2ab928a4c6c6c15 | 631cc6ea1fbc95e55597982f3c5a23d1f3751327 | "2020-06-30T05:34:19Z" | java | "2020-07-02T06:19:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import 'jquery-ui/ui/widgets/draggable'
import 'jquery-ui/ui/widgets/droppable'
import 'jquery-ui/ui/widgets/resizable'
import Vue from 'vue'
import _ from 'lodash'
import i18n from '@/module/i18n'
import { jsPlumb } from 'jsplumb'
import DragZoom from './dragZoom'
import store from '@/conf/home/store'
import router from '@/conf/home/router'
import { uuid, findComponentDownward } from '@/module/util/'
import {
tasksAll,
rtTasksTpl,
setSvgColor,
saveTargetarr,
rtTargetarrArr
} from './util'
import mStart from '@/conf/home/pages/projects/pages/definition/pages/list/_source/start'
import multiDrag from './multiDrag'
const JSP = function () {
this.dag = {}
this.selectedElement = {}
this.config = {
// Whether to drag
isDrag: true,
// Whether to allow connection
isAttachment: false,
// Whether to drag a new node
isNewNodes: true,
// Whether to support double-click node events
isDblclick: true,
// Whether to support right-click menu events
isContextmenu: true,
// Whether to allow click events
isClick: false
}
}
/**
* dag init
*/
JSP.prototype.init = function ({ dag, instance, options }) {
// Get the dag component instance
this.dag = dag
// Get jsplumb instance
this.JspInstance = instance
// Get JSP options
this.options = options || {}
// Register jsplumb connection type and configuration
this.JspInstance.registerConnectionType('basic', {
anchor: 'Continuous',
connector: 'Bezier' // Line type
})
// Initial configuration
this.setConfig({
isDrag: !store.state.dag.isDetails,
isAttachment: false,
isNewNodes: !store.state.dag.isDetails, // Permissions.getAuth() === false ? false : !store.state.dag.isDetails,
isDblclick: true,
isContextmenu: true,
isClick: false
})
// Monitor line click
this.JspInstance.bind('click', e => {
if (this.config.isClick) {
this.connectClick(e)
}
})
// Drag and drop
if (this.config.isNewNodes) {
DragZoom.init()
}
// support multi drag
multiDrag()
}
/**
* set config attribute
*/
JSP.prototype.setConfig = function (o) {
this.config = Object.assign(this.config, {}, o)
}
/**
* Node binding event
*/
JSP.prototype.tasksEvent = function (selfId) {
const tasks = $(`#${selfId}`)
// Bind right event
tasks.on('contextmenu', e => {
this.tasksContextmenu(e)
return false
})
// Binding double click event
tasks.find('.icos').bind('dblclick', e => {
this.tasksDblclick(e)
})
// Binding click event
tasks.on('click', e => {
this.tasksClick(e)
})
}
/**
* Dag node drag and drop processing
*/
JSP.prototype.draggable = function () {
if (this.config.isNewNodes) {
let selfId
const self = this
$('.toolbar-btn .roundedRect').draggable({
scope: 'plant',
helper: 'clone',
containment: $('.dag-model'),
stop: function (e, ui) {
},
drag: function () {
$('body').find('.tooltip.fade.top.in').remove()
}
})
$('#canvas').droppable({
scope: 'plant',
drop: function (ev, ui) {
let id = 'tasks-' + Math.ceil(Math.random() * 100000) // eslint-disable-line
// Get mouse coordinates
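// NOTE (#3085, sketch): this offset math assumes #canvas is unscaled; once
// DragZoom applies a CSS scale, left/top would need dividing by the current
// scale for the node to land where the user dropped it.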
const left = parseInt(ui.offset.left - $(this).offset().left)
let top = parseInt(ui.offset.top - $(this).offset().top) - 10
if (top < 25) {
top = 25
}
// Generate template node
$('#canvas').append(rtTasksTpl({
id: id,
name: id,
x: left,
y: top,
isAttachment: self.config.isAttachment,
taskType: findComponentDownward(self.dag.$root, 'dag-chart').dagBarId
}))
// Get the generated node
const thisDom = jsPlumb.getSelector('.statemachine-demo .w')
// Generating a connection node
self.JspInstance.batch(() => {
self.initNode(thisDom[thisDom.length - 1])
})
selfId = id
self.tasksEvent(selfId)
// Dom structure is not generated without pop-up form form
if ($(`#${selfId}`).html()) {
// dag event
findComponentDownward(self.dag.$root, 'dag-chart')._createNodes({
id: selfId
})
}
}
})
}
}
/**
* Echo json processing and old data structure processing
*/
JSP.prototype.jsonHandle = function ({ largeJson, locations }) {
_.map(largeJson, v => {
// Generate template
$('#canvas').append(rtTasksTpl({
id: v.id,
name: v.name,
x: locations[v.id].x,
y: locations[v.id].y,
targetarr: locations[v.id].targetarr,
isAttachment: this.config.isAttachment,
taskType: v.type,
runFlag: v.runFlag,
nodenumber: locations[v.id].nodenumber,
successNode: v.conditionResult === undefined ? '' : v.conditionResult.successNode[0],
failedNode: v.conditionResult === undefined ? '' : v.conditionResult.failedNode[0]
}))
// contextmenu event
$(`#${v.id}`).on('contextmenu', e => {
this.tasksContextmenu(e)
return false
})
// dblclick event
$(`#${v.id}`).find('.icos').bind('dblclick', e => {
this.tasksDblclick(e)
})
// click event
$(`#${v.id}`).bind('click', e => {
this.tasksClick(e)
})
})
}
/**
* Initialize a single node
*/
JSP.prototype.initNode = function (el) {
// Whether to drag
if (this.config.isDrag) {
this.JspInstance.draggable(el, {
containment: 'dag-container'
})
}
// Node attribute configuration
this.JspInstance.makeSource(el, {
filter: '.ep',
anchor: 'Continuous',
connectorStyle: {
stroke: '#2d8cf0',
strokeWidth: 2,
outlineStroke: 'transparent',
outlineWidth: 4
},
// This place is leaking
// connectionType: "basic",
extract: {
action: 'the-action'
},
maxConnections: -1
})
// Node connection property configuration
this.JspInstance.makeTarget(el, {
dropOptions: { hoverClass: 'dragHover' },
anchor: 'Continuous',
allowLoopback: false // Forbid yourself to connect yourself
})
this.JspInstance.fire('jsPlumbDemoNodeAdded', el)
}
/**
* Node right click menu
*/
JSP.prototype.tasksContextmenu = function (event) {
if (this.config.isContextmenu) {
const routerName = router.history.current.name
// state
const isOne = routerName === 'projects-definition-details' && this.dag.releaseState !== 'NOT_RELEASE'
// hide
const isTwo = store.state.dag.isDetails
const html = [
`<a href="javascript:" id="startRunning" class="${isOne ? '' : 'disbled'}"><em class="ans-icon-play"></em><span>${i18n.$t('Start')}</span></a>`,
`<a href="javascript:" id="editNodes" class="${isTwo ? 'disbled' : ''}"><em class="ans-icon-edit"></em><span>${i18n.$t('Edit')}</span></a>`,
`<a href="javascript:" id="copyNodes" class="${isTwo ? 'disbled' : ''}"><em class="ans-icon-copy"></em><span>${i18n.$t('Copy')}</span></a>`,
`<a href="javascript:" id="removeNodes" class="${isTwo ? 'disbled' : ''}"><em class="ans-icon-trash"></em><span>${i18n.$t('Delete')}</span></a>`
]
const operationHtml = () => {
return html.splice(',')
}
const e = event
const $id = e.currentTarget.id
const $contextmenu = $('#contextmenu')
const $name = $(`#${$id}`).find('.name-p').text()
const $left = e.pageX + document.body.scrollLeft - 5
const $top = e.pageY + document.body.scrollTop - 5
$contextmenu.css({
left: $left,
top: $top,
visibility: 'visible'
})
// Action bar
$contextmenu.html('').append(operationHtml)
if (isOne) {
// start run
$('#startRunning').on('click', () => {
const name = store.state.dag.name
const id = router.history.current.params.id
store.dispatch('dag/getStartCheck', { processDefinitionId: id }).then(res => {
const modal = Vue.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mStart, {
on: {
onUpdate () {
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: {
id: id,
name: name
},
startNodeList: $name,
sourceType: 'contextmenu'
}
})
}
})
}).catch(e => {
Vue.$message.error(e.msg || '')
})
})
}
if (!isTwo) {
// edit node
$('#editNodes').click(ev => {
findComponentDownward(this.dag.$root, 'dag-chart')._createNodes({
id: $id,
type: $(`#${$id}`).attr('data-tasks-type')
})
})
// delete node
$('#removeNodes').click(ev => {
this.removeNodes($id)
})
// copy node
$('#copyNodes').click(res => {
this.copyNodes($id)
})
}
}
}
/**
* Node double click event
*/
JSP.prototype.tasksDblclick = function (e) {
// Untie event
if (this.config.isDblclick) {
const id = $(e.currentTarget.offsetParent).attr('id')
findComponentDownward(this.dag.$root, 'dag-chart')._createNodes({
id: id,
type: $(`#${id}`).attr('data-tasks-type')
})
}
}
/**
* Node click event
*/
JSP.prototype.tasksClick = function (e) {
let $id
const self = this
const $body = $('body')
if (this.config.isClick) {
const $connect = this.selectedElement.connect
$('.w').removeClass('jtk-tasks-active')
$(e.currentTarget).addClass('jtk-tasks-active')
if ($connect) {
setSvgColor($connect, '#2d8cf0')
this.selectedElement.connect = null
}
this.selectedElement.id = $(e.currentTarget).attr('id')
// Unbind copy and paste events
$body.unbind('copy').unbind('paste')
// Copy binding id
$id = self.selectedElement.id
$body.bind({
copy: function () {
$id = self.selectedElement.id
},
paste: function () {
$id && self.copyNodes($id)
}
})
}
}
/**
* Remove binding events
* paste
*/
JSP.prototype.removePaste = function () {
const $body = $('body')
// Unbind copy and paste events
$body.unbind('copy').unbind('paste')
// Remove selected node parameters
this.selectedElement.id = null
// Remove node selection effect
$('.w').removeClass('jtk-tasks-active')
}
/**
* Line click event
*/
JSP.prototype.connectClick = function (e) {
// Set svg color
setSvgColor(e, '#0097e0')
const $id = this.selectedElement.id
if ($id) {
$(`#${$id}`).removeClass('jtk-tasks-active')
this.selectedElement.id = null
}
this.selectedElement.connect = e
}
/**
* toolbarEvent
* @param {Pointer}
*/
JSP.prototype.handleEventPointer = function (is) {
this.setConfig({
isClick: is,
isAttachment: false
})
}
/**
* toolbarEvent
* @param {Line}
*/
JSP.prototype.handleEventLine = function (is) {
const wDom = $('.w')
this.setConfig({
isAttachment: is
})
is ? wDom.addClass('jtk-ep') : wDom.removeClass('jtk-ep')
}
/**
* toolbarEvent
* @param {Remove}
*/
JSP.prototype.handleEventRemove = function () {
const $id = this.selectedElement.id || null
const $connect = this.selectedElement.connect || null
if ($id) {
this.removeNodes(this.selectedElement.id)
} else {
this.removeConnect($connect)
}
// Monitor whether to edit DAG
store.commit('dag/setIsEditDag', true)
}
/**
* Delete node
*/
JSP.prototype.removeNodes = function ($id) {
// Delete node processing(data-targetarr)
_.map(tasksAll(), v => {
const targetarr = v.targetarr.split(',')
if (targetarr.length) {
const newArr = _.filter(targetarr, v1 => v1 !== $id)
$(`#${v.id}`).attr('data-targetarr', newArr.toString())
}
})
// delete node
this.JspInstance.remove($id)
// delete dom
$(`#${$id}`).remove()
// callback onRemoveNodes event
this.options && this.options.onRemoveNodes && this.options.onRemoveNodes($id)
}
/**
* Delete connection
*/
JSP.prototype.removeConnect = function ($connect) {
if (!$connect) {
return
}
// Remove connections and remove node and node dependencies
const targetId = $connect.targetId
const sourceId = $connect.sourceId
let targetarr = rtTargetarrArr(targetId)
if (targetarr.length) {
targetarr = _.filter(targetarr, v => v !== sourceId)
$(`#${targetId}`).attr('data-targetarr', targetarr.toString())
}
if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS') {
$(`#${sourceId}`).attr('data-nodenumber', Number($(`#${sourceId}`).attr('data-nodenumber')) - 1)
}
this.JspInstance.deleteConnection($connect)
this.selectedElement = {}
}
/**
* Copy node
*/
JSP.prototype.copyNodes = function ($id) {
let newNodeInfo = _.cloneDeep(_.find(store.state.dag.tasks, v => v.id === $id))
const newNodePors = store.state.dag.locations[$id]
// Unstored nodes do not allow replication
if (!newNodePors) {
return
}
// Generate random id
const newUuId = `${uuid() + uuid()}`
const id = newNodeInfo.id.length > 8 ? newNodeInfo.id.substr(0, 7) : newNodeInfo.id
const name = newNodeInfo.name.length > 8 ? newNodeInfo.name.substr(0, 7) : newNodeInfo.name
// new id
const newId = `${id || ''}-${newUuId}`
// new name
const newName = `${name || ''}-${newUuId}`
// coordinate x
const newX = newNodePors.x + 100
// coordinate y
const newY = newNodePors.y + 40
// Generate template node
$('#canvas').append(rtTasksTpl({
id: newId,
name: newName,
x: newX,
y: newY,
isAttachment: this.config.isAttachment,
taskType: newNodeInfo.type
}))
// Get the generated node
const thisDom = jsPlumb.getSelector('.statemachine-demo .w')
// Copy node information
newNodeInfo = Object.assign(newNodeInfo, {
id: newId,
name: newName
})
// Add new node
store.commit('dag/addTasks', newNodeInfo)
// Add node location information
store.commit('dag/addLocations', {
[newId]: {
name: newName,
targetarr: '',
nodenumber: 0,
x: newX,
y: newY
}
})
// Generating a connection node
this.JspInstance.batch(() => {
this.initNode(thisDom[thisDom.length - 1])
// Add events to nodes
this.tasksEvent(newId)
})
}
/**
* toolbarEvent
* @param {Screen}
*/
JSP.prototype.handleEventScreen = function ({ item, is }) {
let screenOpen = true
if (is) {
item.icon = 'ans-icon-min'
screenOpen = true
} else {
item.icon = 'ans-icon-max'
screenOpen = false
}
const $mainLayoutModel = $('.main-layout-model')
if (screenOpen) {
$mainLayoutModel.addClass('dag-screen')
} else {
$mainLayoutModel.removeClass('dag-screen')
}
}
/**
* save task
* @param tasks
* @param locations
* @param connects
*/
JSP.prototype.saveStore = function () {
return new Promise((resolve, reject) => {
const connects = []
const locations = {}
const tasks = []
const is = (id) => {
return !!_.filter(tasksAll(), v => v.id === id).length
}
// task
_.map(_.cloneDeep(store.state.dag.tasks), v => {
if (is(v.id)) {
const preTasks = []
const id = $(`#${v.id}`)
const tar = id.attr('data-targetarr')
const idDep = tar ? id.attr('data-targetarr').split(',') : []
if (idDep.length) {
_.map(idDep, v1 => {
preTasks.push($(`#${v1}`).find('.name-p').text())
})
}
let tasksParam = _.assign(v, {
preTasks: preTasks
})
// Sub-workflow has no retries and interval
if (v.type === 'SUB_PROCESS') {
tasksParam = _.omit(tasksParam, ['maxRetryTimes', 'retryInterval'])
}
tasks.push(tasksParam)
}
})
_.map(this.JspInstance.getConnections(), v => {
connects.push({
endPointSourceId: v.sourceId,
endPointTargetId: v.targetId
})
})
_.map(tasksAll(), v => {
locations[v.id] = {
name: v.name,
targetarr: v.targetarr,
nodenumber: v.nodenumber,
x: v.x,
y: v.y
}
})
// Storage node
store.commit('dag/setTasks', tasks)
// Store coordinate information
store.commit('dag/setLocations', locations)
// Storage line dependence
store.commit('dag/setConnects', connects)
resolve({
connects: connects,
tasks: tasks,
locations: locations
})
})
}
/**
* Event processing
*/
JSP.prototype.handleEvent = function () {
this.JspInstance.bind('beforeDrop', function (info) {
const sourceId = info.sourceId // source (outgoing) node
const targetId = info.targetId // target (incoming) node
/**
* Recursive search for nodes
*/
let recursiveVal
const recursiveTargetarr = (arr, targetId) => {
for (const i in arr) {
if (arr[i] === targetId) {
recursiveVal = targetId
} else {
recursiveTargetarr(rtTargetarrArr(arr[i]), targetId)
}
}
return recursiveVal
}
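// e.g. given A -> B -> C (each node's targetarr lists its upstream sources),
// dropping a new edge C -> A calls recursiveTargetarr(rtTargetarrArr(C), A),
// which walks ['B'] -> ['A'], finds A, and the connection is rejected below.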
// Connection to connected nodes is not allowed
if (_.findIndex(rtTargetarrArr(targetId), v => v === sourceId) !== -1) {
return false
}
// Recursive form to find if the target Targetarr has a sourceId
if (recursiveTargetarr(rtTargetarrArr(sourceId), targetId)) {
return false
}
// attr() returns a string, so coerce before checking the CONDITIONS out-degree limit
if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS' && Number($(`#${sourceId}`).attr('data-nodenumber')) === 2) {
return false
} else {
$(`#${sourceId}`).attr('data-nodenumber', Number($(`#${sourceId}`).attr('data-nodenumber')) + 1)
}
// Storage node dependency information
saveTargetarr(sourceId, targetId)
// Monitor whether to edit DAG
store.commit('dag/setIsEditDag', true)
return true
})
}
/**
* Backfill data processing
*/
JSP.prototype.jspBackfill = function ({ connects, locations, largeJson }) {
// Backfill nodes
this.jsonHandle({
largeJson: largeJson,
locations: locations
})
const wNodes = jsPlumb.getSelector('.statemachine-demo .w')
// Backfill line
this.JspInstance.batch(() => {
for (let i = 0; i < wNodes.length; i++) {
this.initNode(wNodes[i])
}
_.map(connects, v => {
let sourceId = v.endPointSourceId.split('-')
let targetId = v.endPointTargetId.split('-')
if (sourceId.length === 4 && targetId.length === 4) {
sourceId = `${sourceId[0]}-${sourceId[1]}-${sourceId[2]}`
targetId = `${targetId[0]}-${targetId[1]}-${targetId[2]}`
} else {
sourceId = v.endPointSourceId
targetId = v.endPointTargetId
}
if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS' && $(`#${sourceId}`).attr('data-successnode') === $(`#${targetId}`).find('.name-p').text()) {
this.JspInstance.connect({
source: sourceId,
target: targetId,
type: 'basic',
paintStyle: { strokeWidth: 2, stroke: '#4caf50' },
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3},
overlays:[["Label", { label: i18n.$t('success'), location:0.5, id:"label"} ]]
})
} else if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS' && $(`#${sourceId}`).attr('data-failednode') === $(`#${targetId}`).find('.name-p').text()) {
this.JspInstance.connect({
source: sourceId,
target: targetId,
type: 'basic',
paintStyle: { strokeWidth: 2, stroke: '#252d39' },
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3},
overlays:[["Label", { label: i18n.$t('failed'), location:0.5, id:"label"} ]]
})
} else {
this.JspInstance.connect({
source: sourceId,
target: targetId,
type: 'basic',
paintStyle: { strokeWidth: 2, stroke: '#2d8cf0' },
HoverPaintStyle: {stroke: '#ccc', strokeWidth: 3}
})
}
})
})
jsPlumb.fire('jsPlumbDemoLoaded', this.JspInstance)
// Connection monitoring
this.handleEvent()
// Drag and drop new nodes
this.draggable()
}
export default new JSP()
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,085 | [BUG] after zoom or drag the dag view, cant plcae node correctly and may be disappear | **Describe the bug**
After zooming or dragging the DAG view, nodes cannot be placed correctly and may disappear.
**To Reproduce**
1. Go to 'project detail dag'
2. Scroll the middle mouse button to zoom the nodes
3. Drag a new node into the containment (close to the sidebar)
4. See the error: the new node disappears, or it cannot be placed where we want.
**Expected behavior**
When we drag a new node into the containment, it should not disappear. We are doing the correct thing, but something wrong happens.
**Screenshots**
![no zoom](https://user-images.githubusercontent.com/40662353/86087052-d0acb000-bad5-11ea-827e-de9316d989a1.png)
![after zoom](https://user-images.githubusercontent.com/40662353/86087169-1a959600-bad6-11ea-858a-b9fc00cc74f0.png)
**Which version of Dolphin Scheduler:**
-[1.2.0 to latest version ]
**Additional context**
After zooming (or dragging the #canvas element), the area of the canvas and the outer area no longer fit together, causing the drag to fail.
I tried to fix this problem. There are two ways:
1. use the jsPlumb native method to support zoom.
2. modify the dragZoom.js code to change the zoom behavior.
I chose option 2, though I think this approach leads to increased code complexity. Any suggestions? Should I create a PR?
| https://github.com/apache/dolphinscheduler/issues/3085 | https://github.com/apache/dolphinscheduler/pull/3103 | b0a7bb4d9c1fbeaa96b3aff8d2ab928a4c6c6c15 | 631cc6ea1fbc95e55597982f3c5a23d1f3751327 | "2020-06-30T05:34:19Z" | java | "2020-07-02T06:19:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
/**
* Node, to array
*/
const rtTargetarrArr = (id) => {
const ids = $(`#${id}`).attr('data-targetarr')
return ids ? ids.split(',') : []
}
/**
* Store node id to targetarr
*/
const saveTargetarr = (valId, domId) => {
const $target = $(`#${domId}`)
const targetStr = $target.attr('data-targetarr') ? $target.attr('data-targetarr') + `,${valId}` : `${valId}`
$target.attr('data-targetarr', targetStr)
}
const rtBantpl = () => {
return `<em class="ans-icon-forbidden" data-toggle="tooltip" data-html="true" data-container="body" data-placement="left" title="${i18n.$t('Prohibition execution')}"></em>`
}
/**
* return node html
*/
const rtTasksTpl = ({ id, name, x, y, targetarr, isAttachment, taskType, runFlag, nodenumber, successNode, failedNode }) => {
let tpl = ''
tpl += `<div class="w jtk-draggable jtk-droppable jtk-endpoint-anchor jtk-connected ${isAttachment ? 'jtk-ep' : ''}" data-targetarr="${targetarr || ''}" data-successNode="${successNode || ''}" data-failedNode="${failedNode || ''}" data-nodenumber="${nodenumber || 0}" data-tasks-type="${taskType}" id="${id}" style="left: ${x}px; top: ${y}px;">`
tpl += '<div>'
tpl += '<div class="state-p"></div>'
tpl += `<div class="icos icos-${taskType}"></div>`
tpl += `<span class="name-p">${name}</span>`
tpl += '</div>'
tpl += '<div class="ep"></div>'
tpl += '<div class="ban-p">'
if (runFlag === 'FORBIDDEN') {
tpl += rtBantpl()
}
tpl += '</div>'
tpl += '</div>'
return tpl
}
/**
* Get all tasks nodes
*/
const tasksAll = () => {
const a = []
$('#canvas .w').each(function (idx, elem) {
const e = $(elem)
a.push({
id: e.attr('id'),
name: e.find('.name-p').text(),
targetarr: e.attr('data-targetarr') || '',
nodenumber: e.attr('data-nodenumber'),
x: parseInt(e.css('left'), 10),
y: parseInt(e.css('top'), 10)
})
})
return a
}
/**
* Determine if name is in the current dag map
* rely dom / backfill
*/
const isNameExDag = (name, rely) => {
if (rely === 'dom') {
return _.findIndex(tasksAll(), v => v.name === name) !== -1
} else {
return _.findIndex(store.state.dag.tasks, v => v.name === name) !== -1
}
}
/**
* Change svg line color
*/
const setSvgColor = (e, color) => {
// Traverse clear all colors
$('.jtk-connector').each((i, o) => {
_.map($(o)[0].childNodes, v => {
$(v).attr('fill', '#2d8cf0').attr('stroke', '#2d8cf0').attr('stroke-width', 2)
})
})
// Add color to the selection
_.map($(e.canvas)[0].childNodes, (v, i) => {
$(v).attr('fill', color).attr('stroke', color)
if ($(v).attr('class')) {
$(v).attr('stroke-width', 2)
}
})
}
/**
* Get all node ids
*/
const allNodesId = () => {
const idArr = []
$('.w').each((i, o) => {
const $obj = $(o)
const $span = $obj.find('.name-p').text()
if ($span) {
idArr.push({
id: $obj.attr('id'),
name: $span
})
}
})
return idArr
}
export {
rtTargetarrArr,
saveTargetarr,
rtTasksTpl,
tasksAll,
isNameExDag,
setSvgColor,
allNodesId,
rtBantpl
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,157 | [BUG] netty server may start multiple times if multiple threads execute the start method |
**Describe the bug**
Look at the code of NettyRemotingServer:
```java
public void start(){
if(this.isStarted.get()){
return;
}
this.serverBootstrap
.group(this.bossGroup, this.workGroup)
.channel(NioServerSocketChannel.class)
.option(ChannelOption.SO_REUSEADDR, true)
.option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog())
.childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive())
.childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay())
.childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize())
.childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize())
.childHandler(new ChannelInitializer<NioSocketChannel>() {
@Override
protected void initChannel(NioSocketChannel ch) throws Exception {
initNettyChannel(ch);
}
});
ChannelFuture future;
try {
future = serverBootstrap.bind(serverConfig.getListenPort()).sync();
} catch (Exception e) {
logger.error("NettyRemotingServer bind fail {}, exit",e.getMessage(), e);
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
if (future.isSuccess()) {
logger.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort());
} else if (future.cause() != null) {
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()), future.cause());
} else {
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
//
isStarted.compareAndSet(false, true);
}
```
If multiple threads execute this method at the same time, the netty server may be started multiple times. A guarded sketch is shown below.
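One way to close the race is to claim the `isStarted` flag at the entry of `start()` with `compareAndSet`, so exactly one thread performs the bootstrap. A minimal, standalone sketch of that pattern — `IdempotentStarter` and its `bind()` method are hypothetical stand-ins, not the project's code:
```java
import java.util.concurrent.atomic.AtomicBoolean;

public class IdempotentStarter {

    private final AtomicBoolean isStarted = new AtomicBoolean(false);

    public void start() {
        // Only the thread that wins this CAS performs the one-time setup;
        // every other concurrent (or later) caller returns immediately.
        if (!isStarted.compareAndSet(false, true)) {
            return;
        }
        try {
            bind(); // stands in for the serverBootstrap configuration and bind()
        } catch (RuntimeException e) {
            isStarted.set(false); // roll back so a failed start can be retried
            throw e;
        }
    }

    private void bind() {
        System.out.println(Thread.currentThread().getName() + ": started exactly once");
    }

    public static void main(String[] args) throws InterruptedException {
        IdempotentStarter server = new IdempotentStarter();
        Thread t1 = new Thread(server::start);
        Thread t2 = new Thread(server::start);
        t1.start();
        t2.start();
        t1.join();
        t2.join();
    }
}
```
Whichever thread loses the CAS simply returns, so `bind()` runs at most once even under concurrent calls.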
**Which version of Dolphin Scheduler:**
-[1.1.0-preview]
| https://github.com/apache/dolphinscheduler/issues/3157 | https://github.com/apache/dolphinscheduler/pull/3158 | d4d6aded1184b803557f444c42253d02887995b9 | bf3cc0d00ef27eb0523b4542e5905256e369b068 | "2020-07-06T12:12:29Z" | java | "2020-07-07T10:45:20Z" | dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/NettyRemotingServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.dolphinscheduler.remote.codec.NettyDecoder;
import org.apache.dolphinscheduler.remote.codec.NettyEncoder;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.apache.dolphinscheduler.remote.handler.NettyServerHandler;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* remoting netty server
*/
public class NettyRemotingServer {
private final Logger logger = LoggerFactory.getLogger(NettyRemotingServer.class);
/**
* server bootstrap
*/
private final ServerBootstrap serverBootstrap = new ServerBootstrap();
/**
* encoder
*/
private final NettyEncoder encoder = new NettyEncoder();
/**
* default executor
*/
private final ExecutorService defaultExecutor = Executors.newFixedThreadPool(Constants.CPUS);
/**
* boss group
*/
private final NioEventLoopGroup bossGroup;
/**
* worker group
*/
private final NioEventLoopGroup workGroup;
/**
* server config
*/
private final NettyServerConfig serverConfig;
/**
* server handler
*/
private final NettyServerHandler serverHandler = new NettyServerHandler(this);
/**
* started flag
*/
private final AtomicBoolean isStarted = new AtomicBoolean(false);
/**
* server init
*
* @param serverConfig server config
*/
public NettyRemotingServer(final NettyServerConfig serverConfig){
this.serverConfig = serverConfig;
this.bossGroup = new NioEventLoopGroup(1, new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyServerBossThread_%d", this.threadIndex.incrementAndGet()));
}
});
this.workGroup = new NioEventLoopGroup(serverConfig.getWorkerThread(), new ThreadFactory() {
private AtomicInteger threadIndex = new AtomicInteger(0);
@Override
public Thread newThread(Runnable r) {
return new Thread(r, String.format("NettyServerWorkerThread_%d", this.threadIndex.incrementAndGet()));
}
});
}
/**
* server start
*/
public void start(){
if(this.isStarted.get()){
return;
}
this.serverBootstrap
.group(this.bossGroup, this.workGroup)
.channel(NioServerSocketChannel.class)
.option(ChannelOption.SO_REUSEADDR, true)
.option(ChannelOption.SO_BACKLOG, serverConfig.getSoBacklog())
.childOption(ChannelOption.SO_KEEPALIVE, serverConfig.isSoKeepalive())
.childOption(ChannelOption.TCP_NODELAY, serverConfig.isTcpNoDelay())
.childOption(ChannelOption.SO_SNDBUF, serverConfig.getSendBufferSize())
.childOption(ChannelOption.SO_RCVBUF, serverConfig.getReceiveBufferSize())
.childHandler(new ChannelInitializer<NioSocketChannel>() {
@Override
protected void initChannel(NioSocketChannel ch) throws Exception {
initNettyChannel(ch);
}
});
ChannelFuture future;
try {
future = serverBootstrap.bind(serverConfig.getListenPort()).sync();
} catch (Exception e) {
logger.error("NettyRemotingServer bind fail {}, exit",e.getMessage(), e);
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
if (future.isSuccess()) {
logger.info("NettyRemotingServer bind success at port : {}", serverConfig.getListenPort());
} else if (future.cause() != null) {
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()), future.cause());
} else {
throw new RuntimeException(String.format("NettyRemotingServer bind %s fail", serverConfig.getListenPort()));
}
//
isStarted.compareAndSet(false, true);
}
/**
* init netty channel
* @param ch socket channel
* @throws Exception
*/
private void initNettyChannel(NioSocketChannel ch) throws Exception{
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast("encoder", encoder);
pipeline.addLast("decoder", new NettyDecoder());
pipeline.addLast("handler", serverHandler);
}
/**
* register processor
* @param commandType command type
* @param processor processor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor) {
this.registerProcessor(commandType, processor, null);
}
/**
* register processor
*
* @param commandType command type
* @param processor processor
* @param executor thread executor
*/
public void registerProcessor(final CommandType commandType, final NettyRequestProcessor processor, final ExecutorService executor) {
this.serverHandler.registerProcessor(commandType, processor, executor);
}
/**
* get default thread executor
* @return thread executor
*/
public ExecutorService getDefaultExecutor() {
return defaultExecutor;
}
public void close() {
if(isStarted.compareAndSet(true, false)){
try {
if(bossGroup != null){
this.bossGroup.shutdownGracefully();
}
if(workGroup != null){
this.workGroup.shutdownGracefully();
}
defaultExecutor.shutdown();
} catch (Exception ex) {
logger.error("netty server close exception", ex);
}
logger.info("netty server closed");
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,165 | config resource.storage.type=hdfs throw exception need config resource.storage.type=HDFS | When I configure
resource.storage.type=hdfs
in common.properties, an exception will be thrown:
![image](https://user-images.githubusercontent.com/59079269/86872565-3cfb6500-c10f-11ea-802b-21fdf1003db7.png)
because the enumerated type comparison does not ignore case. | https://github.com/apache/dolphinscheduler/issues/3165 | https://github.com/apache/dolphinscheduler/pull/3166 | ae902e2308b60f57010918bbce67703def10a02f | a23a3e2d1254acf1d6c194f4996824e3f290cae1 | "2020-07-08T03:37:44Z" | java | "2020-07-09T10:28:04Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import static org.apache.dolphinscheduler.common.Constants.COMMON_PROPERTIES_PATH;
/**
* property utils
* single instance
*/
public class PropertyUtils {
/**
* logger
*/
private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class);
private static final Properties properties = new Properties();
private PropertyUtils() {
throw new IllegalStateException("PropertyUtils class");
}
static {
String[] propertyFiles = new String[]{COMMON_PROPERTIES_PATH};
for (String fileName : propertyFiles) {
InputStream fis = null;
try {
fis = PropertyUtils.class.getResourceAsStream(fileName);
properties.load(fis);
} catch (IOException e) {
logger.error(e.getMessage(), e);
if (fis != null) {
IOUtils.closeQuietly(fis);
}
System.exit(1);
} finally {
IOUtils.closeQuietly(fis);
}
}
}
/**
*
* @return judge whether resource upload startup
*/
public static Boolean getResUploadStartupState(){
String resUploadStartupType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3;
}
/**
* get property value
*
* @param key property name
* @return property value
*/
public static String getString(String key) {
return properties.getProperty(key.trim());
}
/**
* get property value
*
* @param key property name
* @param defaultVal default value
* @return property value
*/
public static String getString(String key, String defaultVal) {
String val = properties.getProperty(key.trim());
return val == null ? defaultVal : val;
}
/**
* get property value
*
* @param key property name
* @return get property int value , if key == null, then return -1
*/
public static int getInt(String key) {
return getInt(key, -1);
}
/**
*
* @param key key
* @param defaultValue default value
* @return property value
*/
public static int getInt(String key, int defaultValue) {
String value = getString(key);
if (value == null) {
return defaultValue;
}
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
logger.info(e.getMessage(),e);
}
return defaultValue;
}
/**
* get property value
*
* @param key property name
* @return property value
*/
public static boolean getBoolean(String key) {
String value = properties.getProperty(key.trim());
if(null != value){
return Boolean.parseBoolean(value);
}
return false;
}
/**
* get property value
*
* @param key property name
* @param defaultValue default value
* @return property value
*/
public static Boolean getBoolean(String key, boolean defaultValue) {
String value = properties.getProperty(key.trim());
if(null != value){
return Boolean.parseBoolean(value);
}
return defaultValue;
}
/**
* get property long value
* @param key key
* @param defaultVal default value
* @return property value
*/
public static long getLong(String key, long defaultVal) {
String val = getString(key);
return val == null ? defaultVal : Long.parseLong(val);
}
/**
*
* @param key key
* @return property value
*/
public static long getLong(String key) {
return getLong(key,-1);
}
/**
*
* @param key key
* @param defaultVal default value
* @return property value
*/
public double getDouble(String key, double defaultVal) {
String val = getString(key);
return val == null ? defaultVal : Double.parseDouble(val);
}
/**
* get array
* @param key property name
* @param splitStr separator
* @return property value through array
*/
public static String[] getArray(String key, String splitStr) {
String value = getString(key);
if (value == null) {
return new String[0];
}
try {
String[] propertyArray = value.split(splitStr);
return propertyArray;
} catch (NumberFormatException e) {
logger.info(e.getMessage(),e);
}
return new String[0];
}
/**
*
* @param key key
* @param type type
* @param defaultValue default value
* @param <T> T
* @return get enum value
*/
public <T extends Enum<T>> T getEnum(String key, Class<T> type,
T defaultValue) {
String val = getString(key);
return val == null ? defaultValue : Enum.valueOf(type, val);
}
/**
* get all properties with specified prefix, like: fs.
* @param prefix prefix to search
* @return all properties with specified prefix
*/
public static Map<String, String> getPrefixedProperties(String prefix) {
Map<String, String> matchedProperties = new HashMap<>();
for (String propName : properties.stringPropertyNames()) {
if (propName.startsWith(prefix)) {
matchedProperties.put(propName, properties.getProperty(propName));
}
}
return matchedProperties;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,176 | [BUG] optimize #3165 Gets the value of this property “resource.storage.type”, Comparison with enumerated types | By looking at the ResourcesService code, I found a potential problem.
Enumeration type comparisons are used in both classes: HadoopUtils.java and CommonUtils.java.
I don't think this [submission](https://github.com/apache/incubator-dolphinscheduler/pull/3166) is perfect.
All comparisons of ResUploadType need to be optimized.
I'm going to modify this part; a sketch of the normalization I have in mind follows below.
(Chinese original, translated:) By reading the ResourcesService code, I found a potential problem.
In HadoopUtils and CommonUtils, the resource center type is compared via enum types.
So I feel that the [submission](https://github.com/apache/incubator-dolphinscheduler/pull/3166) is incomplete.
All code that compares the ResUploadType type needs to be optimized.
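A minimal sketch of that normalization — resolve the enum once, case-insensitively, instead of calling `ResUploadType.valueOf` on the raw property in each utility class. This demo redeclares a `ResUploadType` mirroring the project's values, and the helper name `parseResUploadType` is hypothetical:
```java
// Standalone demo; not the project's enum, values mirror ResUploadType.
enum ResUploadType { HDFS, S3, NONE }

public class ResUploadTypeDemo {

    /** Resolve the configured storage type ignoring case, falling back to NONE. */
    static ResUploadType parseResUploadType(String raw) {
        if (raw == null || raw.trim().isEmpty()) {
            return ResUploadType.NONE;
        }
        return ResUploadType.valueOf(raw.trim().toUpperCase());
    }

    public static void main(String[] args) {
        System.out.println(parseResUploadType("hdfs")); // HDFS
        System.out.println(parseResUploadType("S3"));   // S3
        System.out.println(parseResUploadType(null));   // NONE
    }
}
```
With a single helper like this, HadoopUtils, CommonUtils and PropertyUtils would all share the same case-insensitive behavior.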
| https://github.com/apache/dolphinscheduler/issues/3176 | https://github.com/apache/dolphinscheduler/pull/3178 | 1eb8fb6db33af4b644492a9fb0b0348d7de14407 | 1e7582e910c23a42bb4e92cd64fde4df7cbf6b34 | "2020-07-10T06:05:07Z" | java | "2020-07-10T07:21:42Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/CommonUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.net.URL;
/**
* common utils
*/
public class CommonUtils {
private static final Logger logger = LoggerFactory.getLogger(CommonUtils.class);
private CommonUtils() {
throw new IllegalStateException("CommonUtils class");
}
/**
* @return get the path of system environment variables
*/
public static String getSystemEnvPath() {
String envPath = PropertyUtils.getString(Constants.DOLPHINSCHEDULER_ENV_PATH);
if (StringUtils.isEmpty(envPath)) {
URL envDefaultPath = CommonUtils.class.getClassLoader().getResource(Constants.ENV_PATH);
if (envDefaultPath != null){
envPath = envDefaultPath.getPath();
logger.debug("env path :{}", envPath);
}else{
envPath = "/etc/profile";
}
}
return envPath;
}
/**
*
* @return is develop mode
*/
public static boolean isDevelopMode() {
return PropertyUtils.getBoolean(Constants.DEVELOPMENT_STATE, true);
}
/**
* if upload resource is HDFS and kerberos startup is true , else false
* @return true if upload resource is HDFS and kerberos startup
*/
public static boolean getKerberosStartupState(){
String resUploadStartupType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
Boolean kerberosStartupState = PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE,false);
return resUploadType == ResUploadType.HDFS && kerberosStartupState;
}
/**
* load kerberos configuration
* @throws Exception errors
*/
public static void loadKerberosConf()throws Exception{
if (CommonUtils.getKerberosStartupState()) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF, PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
Configuration configuration = new Configuration();
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, Constants.KERBEROS);
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,176 | [BUG] optimize #3165 Gets the value of this property “resource.storage.type”, Comparison with enumerated types | By looking at the ResourcesService code, I found a potential problem.
Enumeration type comparisons are used in both classes: HadoopUtils.java and CommonUtils.java.
I don't think this [submission](https://github.com/apache/incubator-dolphinscheduler/pull/3166) is perfect.
All comparisons of ResUploadType need to be optimized.
I'm going to modify this part.
(Chinese original, translated:) By reading the ResourcesService code, I found a potential problem.
In HadoopUtils and CommonUtils, the resource center type is compared via enum types.
So I feel that the [submission](https://github.com/apache/incubator-dolphinscheduler/pull/3166) is incomplete.
All code that compares the ResUploadType type needs to be optimized.
| https://github.com/apache/dolphinscheduler/issues/3176 | https://github.com/apache/dolphinscheduler/pull/3178 | 1eb8fb6db33af4b644492a9fb0b0348d7de14407 | 1e7582e910c23a42bb4e92cd64fde4df7cbf6b34 | "2020-07-10T06:05:07Z" | java | "2020-07-10T07:21:42Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.commons.io.IOUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH;
/**
* hadoop utils
* single instance
*/
public class HadoopUtils implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler");
public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS);
private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
.newBuilder()
.expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 2), TimeUnit.HOURS)
.build(new CacheLoader<String, HadoopUtils>() {
@Override
public HadoopUtils load(String key) throws Exception {
return new HadoopUtils();
}
});
private static volatile boolean yarnEnabled = false;
private Configuration configuration;
private FileSystem fs;
private HadoopUtils() {
init();
initHdfsPath();
}
public static HadoopUtils getInstance() {
return cache.getUnchecked(HADOOP_UTILS_KEY);
}
/**
* init dolphinscheduler root path in hdfs
*/
private void initHdfsPath() {
Path path = new Path(resourceUploadPath);
try {
if (!fs.exists(path)) {
fs.mkdirs(path);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* init hadoop configuration
*/
private void init() {
try {
configuration = new Configuration();
String resourceStorageType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType);
if (resUploadType == ResUploadType.HDFS) {
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
hdfsUser = "";
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
//first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
// the default is the local file system
if (defaultFS.startsWith("file")) {
String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
if (StringUtils.isNotBlank(defaultFSProp)) {
Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
} else {
logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
throw new RuntimeException(
String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
);
}
} else {
logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
}
if (fs == null) {
if (StringUtils.isNotEmpty(hdfsUser)) {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
fs = FileSystem.get(configuration);
return true;
}
});
} else {
logger.warn("hdfs.root.user is not set value!");
fs = FileSystem.get(configuration);
}
}
} else if (resUploadType == ResUploadType.S3) {
configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* @return Configuration
*/
public Configuration getConfiguration() {
return configuration;
}
/**
* get application url
*
* @param applicationId application id
* @return url of application
*/
public String getApplicationUrl(String applicationId) throws Exception {
/**
         * if rmHaIds contains xx, it means the resourcemanager is not used;
         * otherwise:
         * if rmHaIds is empty, a single resourcemanager is enabled
         * if rmHaIds is not empty, resourcemanager HA is enabled
*/
String appUrl = "";
if (StringUtils.isEmpty(rmHaIds)){
//single resourcemanager enabled
appUrl = appAddress;
yarnEnabled = true;
} else {
//resourcemanager HA enabled
appUrl = getAppAddress(appAddress, rmHaIds);
yarnEnabled = true;
logger.info("application url : {}", appUrl);
}
if(StringUtils.isBlank(appUrl)){
throw new Exception("application url is blank");
}
return String.format(appUrl, applicationId);
}
public String getJobHistoryUrl(String applicationId) {
//eg:application_1587475402360_712719 -> job_1587475402360_712719
String jobId = applicationId.replace("application", "job");
return String.format(jobHistoryAddress, jobId);
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @return byte[] byte array
* @throws IOException errors
*/
public byte[] catFile(String hdfsFilePath) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return new byte[0];
}
FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
return IOUtils.toByteArray(fsDataInputStream);
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @param skipLineNums skip line numbers
* @param limit read how many lines
* @return content of file
* @throws IOException errors
*/
public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return Collections.emptyList();
}
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
BufferedReader br = new BufferedReader(new InputStreamReader(in));
Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
return stream.collect(Collectors.toList());
}
}
/**
* make the given file and all non-existent parents into
* directories. Has the semantics of Unix 'mkdir -p'.
* Existence of the directory hierarchy is not an error.
*
* @param hdfsPath path to create
* @return mkdir result
* @throws IOException errors
*/
public boolean mkdir(String hdfsPath) throws IOException {
return fs.mkdirs(new Path(hdfsPath));
}
/**
* copy files between FileSystems
*
* @param srcPath source hdfs path
* @param dstPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException {
return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf());
}
/**
* the src file is on the local disk. Add it to FS at
* the given dst name.
*
* @param srcFile local file
* @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcFile);
Path dstPath = new Path(dstHdfsPath);
fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);
return true;
}
/**
* copy hdfs file to local
*
* @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file
* @param deleteSource delete source
* @param overwrite overwrite
* @return result of copy hdfs file to local
* @throws IOException errors
*/
public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcHdfsFilePath);
File dstPath = new File(dstFile);
if (dstPath.exists()) {
if (dstPath.isFile()) {
if (overwrite) {
Files.delete(dstPath.toPath());
}
} else {
logger.error("destination file must be a file");
}
}
if (!dstPath.getParentFile().exists()) {
dstPath.getParentFile().mkdirs();
}
return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf());
}
/**
* delete a file
*
* @param hdfsFilePath the path to delete.
* @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false.
* @throws IOException errors
*/
public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
return fs.delete(new Path(hdfsFilePath), recursive);
}
/**
* check if exists
*
* @param hdfsFilePath source file path
* @return result of exists or not
* @throws IOException errors
*/
public boolean exists(String hdfsFilePath) throws IOException {
return fs.exists(new Path(hdfsFilePath));
}
/**
* Gets a list of files in the directory
*
* @param filePath file path
* @return {@link FileStatus} file status
* @throws Exception errors
*/
public FileStatus[] listFileStatus(String filePath) throws Exception {
try {
return fs.listStatus(new Path(filePath));
} catch (IOException e) {
logger.error("Get file list exception", e);
throw new Exception("Get file list exception", e);
}
}
/**
* Renames Path src to Path dst. Can take place on local fs
* or remote DFS.
*
* @param src path to be renamed
* @param dst new path after rename
* @return true if rename is successful
* @throws IOException on failure
*/
public boolean rename(String src, String dst) throws IOException {
return fs.rename(new Path(src), new Path(dst));
}
/**
* hadoop resourcemanager enabled or not
*
* @return result
*/
public boolean isYarnEnabled() {
return yarnEnabled;
}
/**
* get the state of an application
*
* @param applicationId application id
* @return the return may be null or there may be other parse exceptions
*/
public ExecutionStatus getApplicationStatus(String applicationId) throws Exception{
if (StringUtils.isEmpty(applicationId)) {
return null;
}
String result = Constants.FAILED;
String applicationUrl = getApplicationUrl(applicationId);
logger.info("applicationUrl={}", applicationUrl);
String responseContent = HttpUtils.get(applicationUrl);
if (responseContent != null) {
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
result = jsonObject.path("app").path("finalStatus").asText();
} else {
//may be in job history
String jobHistoryUrl = getJobHistoryUrl(applicationId);
logger.info("jobHistoryUrl={}", jobHistoryUrl);
responseContent = HttpUtils.get(jobHistoryUrl);
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
if (!jsonObject.has("job")){
return ExecutionStatus.FAILURE;
}
result = jsonObject.path("job").path("state").asText();
}
switch (result) {
case Constants.ACCEPTED:
return ExecutionStatus.SUBMITTED_SUCCESS;
case Constants.SUCCEEDED:
return ExecutionStatus.SUCCESS;
case Constants.NEW:
case Constants.NEW_SAVING:
case Constants.SUBMITTED:
case Constants.FAILED:
return ExecutionStatus.FAILURE;
case Constants.KILLED:
return ExecutionStatus.KILL;
case Constants.RUNNING:
default:
return ExecutionStatus.RUNNING_EXEUTION;
}
}
/**
* get data hdfs path
*
* @return data hdfs path
*/
public static String getHdfsDataBasePath() {
if ("/".equals(resourceUploadPath)) {
// if basepath is configured to /, the generated url may be //default/resources (with extra leading /)
return "";
} else {
return resourceUploadPath;
}
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @param resourceType resource type
* @return hdfs resource dir
*/
public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
String hdfsDir = "";
if (resourceType.equals(ResourceType.FILE)) {
hdfsDir = getHdfsResDir(tenantCode);
} else if (resourceType.equals(ResourceType.UDF)) {
hdfsDir = getHdfsUdfDir(tenantCode);
}
return hdfsDir;
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @return hdfs resource dir
*/
public static String getHdfsResDir(String tenantCode) {
return String.format("%s/resources", getHdfsTenantDir(tenantCode));
}
/**
* hdfs user dir
*
* @param tenantCode tenant code
* @param userId user id
* @return hdfs resource dir
*/
public static String getHdfsUserDir(String tenantCode, int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
}
/**
* hdfs udf dir
*
* @param tenantCode tenant code
* @return get udf dir on hdfs
*/
public static String getHdfsUdfDir(String tenantCode) {
return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
}
/**
* get hdfs file name
*
* @param resourceType resource type
* @param tenantCode tenant code
* @param fileName file name
* @return hdfs file name
*/
public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName);
}
/**
* get absolute path and name for resource file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for file on hdfs
*/
public static String getHdfsResourceFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
}
/**
* get absolute path and name for udf file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for udf file on hdfs
*/
public static String getHdfsUdfFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
}
/**
* @param tenantCode tenant code
* @return file directory of tenants on hdfs
*/
public static String getHdfsTenantDir(String tenantCode) {
return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
}
/**
* getAppAddress
*
* @param appAddress app address
* @param rmHa resource manager ha
* @return app address
*/
public static String getAppAddress(String appAddress, String rmHa) {
//get active ResourceManager
String activeRM = YarnHAAdminUtils.getAcitveRMName(rmHa);
String[] split1 = appAddress.split(Constants.DOUBLE_SLASH);
if (split1.length != 2) {
return null;
}
String start = split1[0] + Constants.DOUBLE_SLASH;
String[] split2 = split1[1].split(Constants.COLON);
if (split2.length != 2) {
return null;
}
String end = Constants.COLON + split2[1];
return start + activeRM + end;
}
@Override
public void close() throws IOException {
if (fs != null) {
try {
fs.close();
} catch (IOException e) {
logger.error("Close HadoopUtils instance failed", e);
throw new IOException("Close HadoopUtils instance failed", e);
}
}
}
/**
* yarn ha admin utils
*/
private static final class YarnHAAdminUtils extends RMAdminCLI {
/**
* get active resourcemanager
*
* @param rmIds
* @return
*/
public static String getAcitveRMName(String rmIds) {
String[] rmIdArr = rmIds.split(Constants.COMMA);
int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088);
String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info";
String state = null;
try {
/**
* send http get request to rm1
*/
state = getRMState(String.format(yarnUrl, rmIdArr[0]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[0];
} else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) {
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[1];
}
} else {
return null;
}
} catch (Exception e) {
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
                    return rmIdArr[1]; // rm2 is the active one when querying rm1 failed
}
}
return null;
}
/**
* get ResourceManager state
*
* @param url
* @return
*/
public static String getRMState(String url) {
String retStr = HttpUtils.get(url);
if (StringUtils.isEmpty(retStr)) {
return null;
}
//to json
ObjectNode jsonObject = JSONUtils.parseObject(retStr);
//get ResourceManager state
if (!jsonObject.has("clusterInfo")){
return null;
}
return jsonObject.get("clusterInfo").path("haState").asText();
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,176 | [BUG] optimize #3165 Gets the value of this property “resource.storage.type”, Comparison with enumerated types | By looking at the ResourcesService code, I found a potential problem.
Enumeration type comparisons are used in both classes: HadoopUtils.java and CommonUtils.java.
I don't think this [submission](https://github.com/apache/incubator-dolphinscheduler/pull/3166) is perfect.
All comparisons of ResUploadType need to be optimized.
I'm going to modify this part.
(Chinese original, translated:) By reading the ResourcesService code, I found a potential problem.
In HadoopUtils and CommonUtils, the resource center type is compared via enum types.
So I feel that the [submission](https://github.com/apache/incubator-dolphinscheduler/pull/3166) is incomplete.
All code that compares the ResUploadType type needs to be optimized.
| https://github.com/apache/dolphinscheduler/issues/3176 | https://github.com/apache/dolphinscheduler/pull/3178 | 1eb8fb6db33af4b644492a9fb0b0348d7de14407 | 1e7582e910c23a42bb4e92cd64fde4df7cbf6b34 | "2020-07-10T06:05:07Z" | java | "2020-07-10T07:21:42Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/PropertyUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import static org.apache.dolphinscheduler.common.Constants.COMMON_PROPERTIES_PATH;
/**
* property utils
* single instance
*/
public class PropertyUtils {
/**
* logger
*/
private static final Logger logger = LoggerFactory.getLogger(PropertyUtils.class);
private static final Properties properties = new Properties();
private PropertyUtils() {
throw new IllegalStateException("PropertyUtils class");
}
static {
String[] propertyFiles = new String[]{COMMON_PROPERTIES_PATH};
for (String fileName : propertyFiles) {
InputStream fis = null;
try {
fis = PropertyUtils.class.getResourceAsStream(fileName);
properties.load(fis);
} catch (IOException e) {
logger.error(e.getMessage(), e);
if (fis != null) {
IOUtils.closeQuietly(fis);
}
System.exit(1);
} finally {
IOUtils.closeQuietly(fis);
}
}
}
/**
*
* @return judge whether resource upload startup
*/
public static Boolean getResUploadStartupState(){
String resUploadStartupType = PropertyUtils.getString(Constants.RESOURCE_STORAGE_TYPE).toUpperCase();
ResUploadType resUploadType = ResUploadType.valueOf(resUploadStartupType);
return resUploadType == ResUploadType.HDFS || resUploadType == ResUploadType.S3;
}
/**
* get property value
*
* @param key property name
* @return property value
*/
public static String getString(String key) {
return properties.getProperty(key.trim());
}
/**
* get property value
*
* @param key property name
* @param defaultVal default value
* @return property value
*/
public static String getString(String key, String defaultVal) {
String val = properties.getProperty(key.trim());
return val == null ? defaultVal : val;
}
/**
* get property value
*
* @param key property name
* @return get property int value , if key == null, then return -1
*/
public static int getInt(String key) {
return getInt(key, -1);
}
/**
*
* @param key key
* @param defaultValue default value
* @return property value
*/
public static int getInt(String key, int defaultValue) {
String value = getString(key);
if (value == null) {
return defaultValue;
}
try {
return Integer.parseInt(value);
} catch (NumberFormatException e) {
logger.info(e.getMessage(),e);
}
return defaultValue;
}
/**
* get property value
*
* @param key property name
* @return property value
*/
public static boolean getBoolean(String key) {
String value = properties.getProperty(key.trim());
if(null != value){
return Boolean.parseBoolean(value);
}
return false;
}
/**
* get property value
*
* @param key property name
* @param defaultValue default value
* @return property value
*/
public static Boolean getBoolean(String key, boolean defaultValue) {
String value = properties.getProperty(key.trim());
if(null != value){
return Boolean.parseBoolean(value);
}
return defaultValue;
}
/**
* get property long value
* @param key key
* @param defaultVal default value
* @return property value
*/
public static long getLong(String key, long defaultVal) {
String val = getString(key);
return val == null ? defaultVal : Long.parseLong(val);
}
/**
*
* @param key key
* @return property value
*/
public static long getLong(String key) {
return getLong(key,-1);
}
/**
*
* @param key key
* @param defaultVal default value
* @return property value
*/
public double getDouble(String key, double defaultVal) {
String val = getString(key);
return val == null ? defaultVal : Double.parseDouble(val);
}
/**
* get array
* @param key property name
* @param splitStr separator
* @return property value through array
*/
public static String[] getArray(String key, String splitStr) {
String value = getString(key);
if (value == null) {
return new String[0];
}
try {
String[] propertyArray = value.split(splitStr);
return propertyArray;
} catch (NumberFormatException e) {
logger.info(e.getMessage(),e);
}
return new String[0];
}
/**
*
* @param key key
* @param type type
* @param defaultValue default value
* @param <T> T
* @return get enum value
*/
public <T extends Enum<T>> T getEnum(String key, Class<T> type,
T defaultValue) {
String val = getString(key);
return val == null ? defaultValue : Enum.valueOf(type, val);
}
/**
* get all properties with specified prefix, like: fs.
* @param prefix prefix to search
* @return all properties with specified prefix
*/
public static Map<String, String> getPrefixedProperties(String prefix) {
Map<String, String> matchedProperties = new HashMap<>();
for (String propName : properties.stringPropertyNames()) {
if (propName.startsWith(prefix)) {
matchedProperties.put(propName, properties.getProperty(propName));
}
}
return matchedProperties;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,181 | [BUG] get http code 400 bad request with AWS S3 as resource storage type |
**Describe the bug**
When you use AWS S3 as the resource storage backend, you will get an error like:
```log
Status Code: 400, AWS Service: Amazon S3, AWS Request ID: xxxxxxx, AWS Error Code: null, AWS Error Message: Bad Request
```
**To Reproduce**
Just set `resource.storage.type=S3` in common.properties and keep the other configuration correct.
**Expected behavior**
The resource centre works fine.
**Screenshots**
When you try to upload a file in the resource centre, you will get an error.
**Which version of Dolphin Scheduler:**
-[1.3.1-release]
**Additional context**
It is because of the AWS S3 encryption (signature) method version; see the sketch below.
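For readers hitting the same 400: newer S3 regions accept only AWS Signature Version 4, and a client that signs with an older scheme is rejected with `400 Bad Request`. A hedged sketch of the usual workaround — not necessarily the exact fix in the PR; the endpoint value is an example:
```java
import org.apache.hadoop.conf.Configuration;

public class S3SigV4Example {
    public static void main(String[] args) {
        // Documented AWS Java SDK v1 switch to force Signature Version 4 signing.
        System.setProperty("com.amazonaws.services.s3.enableV4", "true");

        Configuration conf = new Configuration();
        // V4-only regions also require the bucket's regional endpoint
        // ("fs.s3a.endpoint" is the real s3a key, see Constants.FS_S3A_ENDPOINT below).
        conf.set("fs.s3a.endpoint", "s3.eu-central-1.amazonaws.com");
        conf.set("fs.s3a.access.key", "<access-key>");
        conf.set("fs.s3a.secret.key", "<secret-key>");

        System.out.println("s3a endpoint = " + conf.get("fs.s3a.endpoint"));
    }
}
```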
**Requirement or improvement**
I will make a PR for this later.
| https://github.com/apache/dolphinscheduler/issues/3181 | https://github.com/apache/dolphinscheduler/pull/3182 | 1e7582e910c23a42bb4e92cd64fde4df7cbf6b34 | dcdd7dedd06454ed468eae86881b75177261c9e2 | "2020-07-10T11:30:13Z" | java | "2020-07-11T00:56:38Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import java.util.regex.Pattern;
/**
* Constants
*/
public final class Constants {
private Constants() {
throw new IllegalStateException("Constants class");
}
/**
* quartz config
*/
public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId";
public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon";
public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties";
public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class";
public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount";
public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = "org.quartz.threadPool.makeThreadsDaemons";
public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority";
public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class";
public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix";
public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered";
public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold";
public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval";
public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock";
public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource";
public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class";
/**
* quartz config default value
*/
public static final String QUARTZ_TABLE_PREFIX = "QRTZ_";
public static final String QUARTZ_MISFIRETHRESHOLD = "60000";
public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000";
public static final String QUARTZ_DATASOURCE = "myDs";
public static final String QUARTZ_THREADCOUNT = "25";
public static final String QUARTZ_THREADPRIORITY = "5";
public static final String QUARTZ_INSTANCENAME = "DolphinScheduler";
public static final String QUARTZ_INSTANCEID = "AUTO";
public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true";
/**
* common properties path
*/
public static final String COMMON_PROPERTIES_PATH = "/common.properties";
/**
* fs.defaultFS
*/
public static final String FS_DEFAULTFS = "fs.defaultFS";
/**
* fs s3a endpoint
*/
public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint";
/**
* fs s3a access key
*/
public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key";
/**
* fs s3a secret key
*/
public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key";
/**
* yarn.resourcemanager.ha.rm.ids
*/
public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids";
public static final String YARN_RESOURCEMANAGER_HA_XX = "xx";
/**
* yarn.application.status.address
*/
public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address";
/**
* yarn.job.history.status.address
*/
public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address";
/**
* hdfs configuration
* hdfs.root.user
*/
public static final String HDFS_ROOT_USER = "hdfs.root.user";
/**
* hdfs/s3 configuration
* resource.upload.path
*/
public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path";
/**
* data basedir path
*/
public static final String DATA_BASEDIR_PATH = "data.basedir.path";
/**
* dolphinscheduler.env.path
*/
public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path";
/**
* environment properties default path
*/
public static final String ENV_PATH = "env/dolphinscheduler_env.sh";
/**
* python home
*/
public static final String PYTHON_HOME="PYTHON_HOME";
/**
* resource.view.suffixs
*/
public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs";
public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties";
/**
* development.state
*/
public static final String DEVELOPMENT_STATE = "development.state";
public static final String DEVELOPMENT_STATE_DEFAULT_VALUE = "true";
/**
* string true
*/
public static final String STRING_TRUE = "true";
/**
* string false
*/
public static final String STRING_FALSE = "false";
/**
* resource storage type
*/
public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type";
/**
* MasterServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/nodes/master";
/**
* WorkerServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/nodes/worker";
/**
* all servers directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS = "/dead-servers";
/**
* MasterServer lock directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters";
/**
* MasterServer failover directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters";
/**
* WorkerServer failover directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers";
/**
* MasterServer startup failover running and fault tolerance process
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters";
/**
* comma ,
*/
public static final String COMMA = ",";
/**
* slash /
*/
public static final String SLASH = "/";
/**
* COLON :
*/
public static final String COLON = ":";
/**
* SINGLE_SLASH /
*/
public static final String SINGLE_SLASH = "/";
/**
* DOUBLE_SLASH //
*/
public static final String DOUBLE_SLASH = "//";
/**
* SEMICOLON ;
*/
public static final String SEMICOLON = ";";
/**
* EQUAL SIGN
*/
public static final String EQUAL_SIGN = "=";
/**
* AT SIGN
*/
public static final String AT_SIGN = "@";
public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg";
public static final String WORKER_RESERVED_MEMORY = "worker.reserved.memory";
public static final String MASTER_MAX_CPULOAD_AVG = "master.max.cpuload.avg";
public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory";
/**
* date format of yyyy-MM-dd HH:mm:ss
*/
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
/**
* date format of yyyyMMddHHmmss
*/
public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss";
/**
* http connect time out
*/
public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000;
/**
* http connect request time out
*/
public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000;
/**
* httpclient socket timeout
*/
public static final int SOCKET_TIMEOUT = 60 * 1000;
/**
* http header
*/
public static final String HTTP_HEADER_UNKNOWN = "unKnown";
/**
* http X-Forwarded-For
*/
public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For";
/**
* http X-Real-IP
*/
public static final String HTTP_X_REAL_IP = "X-Real-IP";
/**
* UTF-8
*/
public static final String UTF_8 = "UTF-8";
/**
* user name regex
*/
public static final Pattern REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,39}$");
/**
* email regex
*/
public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[_|\\-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$");
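// unix-style permission bits: execute = 1, read = 2, write = 4, combined by bitwise OR below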
/**
* read permission
*/
public static final int READ_PERMISSION = 2 * 1;
/**
* write permission
*/
public static final int WRITE_PERMISSION = 2 * 2;
/**
* execute permission
*/
public static final int EXECUTE_PERMISSION = 1;
/**
* default admin permission
*/
public static final int DEFAULT_ADMIN_PERMISSION = 7;
/**
* all permissions
*/
public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION;
/**
* max task timeout
*/
public static final int MAX_TASK_TIMEOUT = 24 * 3600;
/**
* master cpu load
*/
public static final int DEFAULT_MASTER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;
/**
* master reserved memory
*/
public static final double DEFAULT_MASTER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* worker cpu load
*/
public static final int DEFAULT_WORKER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;
/**
* worker reserved memory
*/
public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* default log cache rows number; output when the number is reached
*/
public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16;
/**
* log flush interval; output when the interval is reached
*/
public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000;
/**
* time unit: seconds to minutes
*/
public static final int SEC_2_MINUTES_TIME_UNIT = 60;
/**
* rpc port
*/
public static final int RPC_PORT = 50051;
/**
* forbid running task
*/
public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN";
/**
* datasource configuration path
*/
public static final String DATASOURCE_PROPERTIES = "/datasource.properties";
public static final String TASK_RECORD_URL = "task.record.datasource.url";
public static final String TASK_RECORD_FLAG = "task.record.flag";
public static final String TASK_RECORD_USER = "task.record.datasource.username";
public static final String TASK_RECORD_PWD = "task.record.datasource.password";
public static final String DEFAULT = "Default";
public static final String USER = "user";
public static final String PASSWORD = "password";
public static final String XXXXXX = "******";
public static final String NULL = "NULL";
public static final String THREAD_NAME_MASTER_SERVER = "Master-Server";
public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server";
public static final String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd";
public static final String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd";
/**
* command parameter keys
*/
public static final String CMDPARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId";
public static final String CMDPARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList";
public static final String CMDPARAM_RECOVERY_WAITTING_THREAD = "WaittingThreadInstanceId";
public static final String CMDPARAM_SUB_PROCESS = "processInstanceId";
public static final String CMDPARAM_EMPTY_SUB_PROCESS = "0";
public static final String CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId";
public static final String CMDPARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId";
public static final String CMDPARAM_START_NODE_NAMES = "StartNodeNameList";
/**
* complement data start date
*/
public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate";
/**
* complement data end date
*/
public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate";
/**
* hadoop configuration
*/
public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE";
public static final String HADOOP_RM_STATE_STANDBY = "STANDBY";
public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port";
/**
* data source config
*/
public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name";
public static final String SPRING_DATASOURCE_URL = "spring.datasource.url";
public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username";
public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout";
public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize";
public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle";
public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive";
public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis";
public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery";
public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle";
public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow";
public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn";
public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements";
public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit";
public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive";
public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize";
public static final String DEVELOPMENT = "development";
public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties";
/**
* sleep time
*/
public static final int SLEEP_TIME_MILLIS = 1000;
/**
* heartbeat for zk info length
*/
public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10;
/**
* hadoop params constant
*/
/**
* jar
*/
public static final String JAR = "jar";
/**
* hadoop
*/
public static final String HADOOP = "hadoop";
/**
* -D parameter
*/
public static final String D = "-D";
/**
* -D mapreduce.job.queuename=queuename
*/
public static final String MR_QUEUE = "mapreduce.job.queuename";
/**
* spark params constant
*/
public static final String MASTER = "--master";
public static final String DEPLOY_MODE = "--deploy-mode";
/**
* --class CLASS_NAME
*/
public static final String MAIN_CLASS = "--class";
/**
* --driver-cores NUM
*/
public static final String DRIVER_CORES = "--driver-cores";
/**
* --driver-memory MEM
*/
public static final String DRIVER_MEMORY = "--driver-memory";
/**
* --num-executors NUM
*/
public static final String NUM_EXECUTORS = "--num-executors";
/**
* --executor-cores NUM
*/
public static final String EXECUTOR_CORES = "--executor-cores";
/**
* --executor-memory MEM
*/
public static final String EXECUTOR_MEMORY = "--executor-memory";
/**
* --queue QUEUE
*/
public static final String SPARK_QUEUE = "--queue";
/**
* --queue --qu
*/
public static final String FLINK_QUEUE = "--qu";
/**
* exit code success
*/
public static final int EXIT_CODE_SUCCESS = 0;
/**
* exit code kill
*/
public static final int EXIT_CODE_KILL = 137;
/**
* exit code failure
*/
public static final int EXIT_CODE_FAILURE = -1;
/**
* date format of yyyyMMdd
*/
public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd";
/**
* date format of yyyyMMddHHmmss
*/
public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss";
/**
* system date(yyyyMMddHHmmss)
*/
public static final String PARAMETER_DATETIME = "system.datetime";
/**
* system date(yyyyMMdd) today
*/
public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate";
/**
* system date(yyyyMMdd) yesterday
*/
public static final String PARAMETER_BUSINESS_DATE = "system.biz.date";
/**
* ACCEPTED
*/
public static final String ACCEPTED = "ACCEPTED";
/**
* SUCCEEDED
*/
public static final String SUCCEEDED = "SUCCEEDED";
/**
* NEW
*/
public static final String NEW = "NEW";
/**
* NEW_SAVING
*/
public static final String NEW_SAVING = "NEW_SAVING";
/**
* SUBMITTED
*/
public static final String SUBMITTED = "SUBMITTED";
/**
* FAILED
*/
public static final String FAILED = "FAILED";
/**
* KILLED
*/
public static final String KILLED = "KILLED";
/**
* RUNNING
*/
public static final String RUNNING = "RUNNING";
/**
* underline "_"
*/
public static final String UNDERLINE = "_";
/**
* quartz job prefix
*/
public static final String QUARTZ_JOB_PRIFIX = "job";
/**
* quartz job group prefix
*/
public static final String QUARTZ_JOB_GROUP_PRIFIX = "jobgroup";
/**
* projectId
*/
public static final String PROJECT_ID = "projectId";
/**
* scheduleId
*/
public static final String SCHEDULE_ID = "scheduleId";
/**
* schedule
*/
public static final String SCHEDULE = "schedule";
/**
* application regex
*/
public static final String APPLICATION_REGEX = "application_\\d+_\\d+";
public static final String PID = OSUtils.isWindows() ? "handle" : "pid";
/**
* month_begin
*/
public static final String MONTH_BEGIN = "month_begin";
/**
* add_months
*/
public static final String ADD_MONTHS = "add_months";
/**
* month_end
*/
public static final String MONTH_END = "month_end";
/**
* week_begin
*/
public static final String WEEK_BEGIN = "week_begin";
/**
* week_end
*/
public static final String WEEK_END = "week_end";
/**
* timestamp
*/
public static final String TIMESTAMP = "timestamp";
public static final char SUBTRACT_CHAR = '-';
public static final char ADD_CHAR = '+';
public static final char MULTIPLY_CHAR = '*';
public static final char DIVISION_CHAR = '/';
public static final char LEFT_BRACE_CHAR = '(';
public static final char RIGHT_BRACE_CHAR = ')';
public static final String ADD_STRING = "+";
public static final String MULTIPLY_STRING = "*";
public static final String DIVISION_STRING = "/";
public static final String LEFT_BRACE_STRING = "(";
public static final char P = 'P';
public static final char N = 'N';
public static final String SUBTRACT_STRING = "-";
public static final String GLOBAL_PARAMS = "globalParams";
public static final String LOCAL_PARAMS = "localParams";
public static final String PROCESS_INSTANCE_STATE = "processInstanceState";
public static final String TASK_LIST = "taskList";
public static final String RWXR_XR_X = "rwxr-xr-x";
/**
* master/worker server use for zk
*/
public static final String MASTER_PREFIX = "master";
public static final String WORKER_PREFIX = "worker";
public static final String DELETE_ZK_OP = "delete";
public static final String ADD_ZK_OP = "add";
public static final String ALIAS = "alias";
public static final String CONTENT = "content";
public static final String DEPENDENT_SPLIT = ":||";
public static final String DEPENDENT_ALL = "ALL";
/**
* preview schedule execute count
*/
public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5;
/**
* kerberos
*/
public static final String KERBEROS = "kerberos";
/**
* kerberos expire time
*/
public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time";
/**
* java.security.krb5.conf
*/
public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
/**
* java.security.krb5.conf.path
*/
public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
/**
* hadoop.security.authentication.startup.state
*/
public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state";
/**
* loginUserFromKeytab user
*/
public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username";
/**
* default worker group id
*/
public static final int DEFAULT_WORKER_ID = -1;
/**
* loginUserFromKeytab path
*/
public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path";
/**
* task log info format
*/
public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s";
/**
* hive conf
*/
public static final String HIVE_CONF = "hiveconf:";
// flink params constant
public static final String FLINK_YARN_CLUSTER = "yarn-cluster";
public static final String FLINK_RUN_MODE = "-m";
public static final String FLINK_YARN_SLOT = "-ys";
public static final String FLINK_APP_NAME = "-ynm";
public static final String FLINK_TASK_MANAGE = "-yn";
public static final String FLINK_JOB_MANAGE_MEM = "-yjm";
public static final String FLINK_TASK_MANAGE_MEM = "-ytm";
public static final String FLINK_DETACH = "-d";
public static final String FLINK_MAIN_CLASS = "-c";
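/**
* not-terminated workflow/task states, expressed as ExecutionStatus ordinals
*/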
public static final int[] NOT_TERMINATED_STATES = new int[]{
ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal(),
ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
ExecutionStatus.WAITTING_THREAD.ordinal(),
ExecutionStatus.WAITTING_DEPEND.ordinal()
};
/**
* status
*/
public static final String STATUS = "status";
/**
* message
*/
public static final String MSG = "msg";
/**
* data total
*/
public static final String COUNT = "count";
/**
* page size
*/
public static final String PAGE_SIZE = "pageSize";
/**
* current page no
*/
public static final String PAGE_NUMBER = "pageNo";
/**
* data list
*/
public static final String DATA_LIST = "data";
public static final String TOTAL_LIST = "totalList";
public static final String CURRENT_PAGE = "currentPage";
public static final String TOTAL_PAGE = "totalPage";
public static final String TOTAL = "total";
/**
* session user
*/
public static final String SESSION_USER = "session.user";
public static final String SESSION_ID = "sessionId";
public static final String PASSWORD_DEFAULT = "******";
/**
* driver
*/
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
/**
* database type
*/
public static final String MYSQL = "MYSQL";
public static final String POSTGRESQL = "POSTGRESQL";
public static final String HIVE = "HIVE";
public static final String SPARK = "SPARK";
public static final String CLICKHOUSE = "CLICKHOUSE";
public static final String ORACLE = "ORACLE";
public static final String SQLSERVER = "SQLSERVER";
public static final String DB2 = "DB2";
/**
* jdbc url
*/
public static final String JDBC_MYSQL = "jdbc:mysql://";
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@";
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String ADDRESS = "address";
public static final String DATABASE = "database";
public static final String JDBC_URL = "jdbcUrl";
public static final String PRINCIPAL = "principal";
public static final String OTHER = "other";
public static final String ORACLE_DB_CONNECT_TYPE = "connectType";
/**
* session timeout
*/
public static final int SESSION_TIME_OUT = 7200;
public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024;
public static final String UDF = "UDF";
public static final String CLASS = "class";
public static final String RECEIVERS = "receivers";
public static final String RECEIVERS_CC = "receiversCc";
/**
* dataSource sensitive param
*/
public static final String DATASOURCE_PASSWORD_REGEX = "(?<=(\"password\":\")).*?(?=(\"))";
/**
* default worker group
*/
public static final String DEFAULT_WORKER_GROUP = "default";
public static final Integer TASK_INFO_LENGTH = 5;
/**
* schedule time
*/
public static final String PARAMETER_SHECDULE_TIME = "schedule.time";
/**
* authorize writable perm
*/
public static final int AUTHORIZE_WRITABLE_PERM=7;
/**
* authorize readable perm
*/
public static final int AUTHORIZE_READABLE_PERM=4;
/**
* plugin configurations
*/
public static final String PLUGIN_JAR_SUFFIX = ".jar";
public static final int NORAML_NODE_STATUS = 0;
public static final int ABNORMAL_NODE_STATUS = 1;
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,181 | [BUG] get http code 400 bad request with AWS S3 as resource storage type | **Describe the bug**
When you use AWS S3 as the resource storage backend, you will get an error like:
``` log
Status Code: 400, AWS Service: Amazon S3, AWS Request ID: xxxxxxx, AWS Error Code: null, AWS Error Message: Bad Request
```
**To Reproduce**
Steps to reproduce the behavior, for example:
Just set `resource.storage.type=S3` in common.properties, keeping the rest of the S3 configuration correct; a sketch of the relevant entries follows.
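The values below are placeholders; only the keys come from the code base:
``` properties
resource.storage.type=S3
fs.defaultFS=s3a://dolphinscheduler-test
fs.s3a.endpoint=s3.cn-north-1.amazonaws.com.cn
fs.s3a.access.key=<your-access-key>
fs.s3a.secret.key=<your-secret-key>
```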
**Expected behavior**
The resource centre works fine.
**Screenshots**
When you try to upload a file in the resource centre, you will get an error.
**Which version of Dolphin Scheduler:**
- [1.3.1-release]
**Additional context**
It is caused by the request-signing version used against AWS S3: newer regions accept only AWS Signature Version 4, while the client here signs with an older version, so S3 rejects the request with 400 Bad Request; a hedged workaround sketch follows.
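A minimal sketch of the workaround, assuming the Hadoop S3A connector with the AWS SDK v1 underneath; the bucket and endpoint below are placeholders, not taken from this issue:
``` java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class S3SigV4Check {
    public static void main(String[] args) throws Exception {
        // AWS SDK v1 global switch that forces Signature Version 4
        System.setProperty("com.amazonaws.services.s3.enableV4", "true");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "s3a://my-bucket");                  // placeholder bucket
        conf.set("fs.s3a.endpoint", "s3.eu-central-1.amazonaws.com"); // placeholder SigV4-only region
        // hadoop-aws side of the same switch on recent releases
        conf.set("fs.s3a.signing-algorithm", "AWSS3V4SignerType");

        try (FileSystem fs = FileSystem.get(conf)) {
            // a simple round trip: this fails with 400 when the signature version is wrong
            System.out.println(fs.exists(new Path("/")));
        }
    }
}
```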
**Requirement or improvement**
I will make a PR for this later.
| https://github.com/apache/dolphinscheduler/issues/3181 | https://github.com/apache/dolphinscheduler/pull/3182 | 1e7582e910c23a42bb4e92cd64fde4df7cbf6b34 | dcdd7dedd06454ed468eae86881b75177261c9e2 | "2020-07-10T11:30:13Z" | java | "2020-07-11T00:56:38Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.commons.io.IOUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH;
/**
* hadoop utils
* single instance
*/
public class HadoopUtils implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler");
public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS);
private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
.newBuilder()
.expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 2), TimeUnit.HOURS)
.build(new CacheLoader<String, HadoopUtils>() {
@Override
public HadoopUtils load(String key) throws Exception {
return new HadoopUtils();
}
});
private static volatile boolean yarnEnabled = false;
private Configuration configuration;
private FileSystem fs;
private HadoopUtils() {
init();
initHdfsPath();
}
public static HadoopUtils getInstance() {
return cache.getUnchecked(HADOOP_UTILS_KEY);
}
/**
* init dolphinscheduler root path in hdfs
*/
private void initHdfsPath() {
Path path = new Path(resourceUploadPath);
try {
if (!fs.exists(path)) {
fs.mkdirs(path);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* init hadoop configuration
*/
private void init() {
try {
configuration = new Configuration();
String resourceStorageType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType);
if (resUploadType == ResUploadType.HDFS) {
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
hdfsUser = "";
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
//first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
// the default is the local file system
if (defaultFS.startsWith("file")) {
String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
if (StringUtils.isNotBlank(defaultFSProp)) {
Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
} else {
logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
throw new RuntimeException(
String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
);
}
} else {
logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
}
if (fs == null) {
if (StringUtils.isNotEmpty(hdfsUser)) {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
fs = FileSystem.get(configuration);
return true;
}
});
} else {
logger.warn("hdfs.root.user is not set value!");
fs = FileSystem.get(configuration);
}
}
} else if (resUploadType == ResUploadType.S3) {
configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* @return Configuration
*/
public Configuration getConfiguration() {
return configuration;
}
/**
* get application url
*
* @param applicationId application id
* @return url of application
*/
public String getApplicationUrl(String applicationId) throws Exception {
/**
* if rmHaIds contains xx, it means resourcemanager is not used
* otherwise:
* if rmHaIds is empty, a single resourcemanager is enabled
* if rmHaIds is not empty, resourcemanager HA is enabled
*/
String appUrl = "";
if (StringUtils.isEmpty(rmHaIds)){
//single resourcemanager enabled
appUrl = appAddress;
yarnEnabled = true;
} else {
//resourcemanager HA enabled
appUrl = getAppAddress(appAddress, rmHaIds);
yarnEnabled = true;
logger.info("application url : {}", appUrl);
}
if(StringUtils.isBlank(appUrl)){
throw new Exception("application url is blank");
}
return String.format(appUrl, applicationId);
}
public String getJobHistoryUrl(String applicationId) {
//eg:application_1587475402360_712719 -> job_1587475402360_712719
String jobId = applicationId.replace("application", "job");
return String.format(jobHistoryAddress, jobId);
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @return byte[] byte array
* @throws IOException errors
*/
public byte[] catFile(String hdfsFilePath) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return new byte[0];
}
// try-with-resources so the stream is closed even when reading fails
try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) {
return IOUtils.toByteArray(fsDataInputStream);
}
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @param skipLineNums skip line numbers
* @param limit read how many lines
* @return content of file
* @throws IOException errors
*/
public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return Collections.emptyList();
}
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath));
BufferedReader br = new BufferedReader(new InputStreamReader(in))) {
Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
return stream.collect(Collectors.toList());
}
}
/**
* make the given file and all non-existent parents into
* directories. Has the semantics of Unix 'mkdir -p'.
* Existence of the directory hierarchy is not an error.
*
* @param hdfsPath path to create
* @return mkdir result
* @throws IOException errors
*/
public boolean mkdir(String hdfsPath) throws IOException {
return fs.mkdirs(new Path(hdfsPath));
}
/**
* copy files between FileSystems
*
* @param srcPath source hdfs path
* @param dstPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException {
return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf());
}
/**
* the src file is on the local disk. Add it to FS at
* the given dst name.
*
* @param srcFile local file
* @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcFile);
Path dstPath = new Path(dstHdfsPath);
fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);
return true;
}
/**
* copy hdfs file to local
*
* @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file
* @param deleteSource delete source
* @param overwrite overwrite
* @return result of copy hdfs file to local
* @throws IOException errors
*/
public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcHdfsFilePath);
File dstPath = new File(dstFile);
if (dstPath.exists()) {
if (dstPath.isFile()) {
if (overwrite) {
Files.delete(dstPath.toPath());
}
} else {
logger.error("destination file must be a file");
}
}
if (!dstPath.getParentFile().exists()) {
dstPath.getParentFile().mkdirs();
}
return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf());
}
/**
* delete a file
*
* @param hdfsFilePath the path to delete.
* @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false.
* @throws IOException errors
*/
public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
return fs.delete(new Path(hdfsFilePath), recursive);
}
/**
* check if exists
*
* @param hdfsFilePath source file path
* @return result of exists or not
* @throws IOException errors
*/
public boolean exists(String hdfsFilePath) throws IOException {
return fs.exists(new Path(hdfsFilePath));
}
/**
* Gets a list of files in the directory
*
* @param filePath file path
* @return {@link FileStatus} file status
* @throws Exception errors
*/
public FileStatus[] listFileStatus(String filePath) throws Exception {
try {
return fs.listStatus(new Path(filePath));
} catch (IOException e) {
logger.error("Get file list exception", e);
throw new Exception("Get file list exception", e);
}
}
/**
* Renames Path src to Path dst. Can take place on local fs
* or remote DFS.
*
* @param src path to be renamed
* @param dst new path after rename
* @return true if rename is successful
* @throws IOException on failure
*/
public boolean rename(String src, String dst) throws IOException {
return fs.rename(new Path(src), new Path(dst));
}
/**
* hadoop resourcemanager enabled or not
*
* @return result
*/
public boolean isYarnEnabled() {
return yarnEnabled;
}
/**
* get the state of an application
*
* @param applicationId application id
* @return the application status, or null if the application id is empty; parse exceptions may also occur
*/
public ExecutionStatus getApplicationStatus(String applicationId) throws Exception{
if (StringUtils.isEmpty(applicationId)) {
return null;
}
String result = Constants.FAILED;
String applicationUrl = getApplicationUrl(applicationId);
logger.info("applicationUrl={}", applicationUrl);
String responseContent = HttpUtils.get(applicationUrl);
if (responseContent != null) {
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
result = jsonObject.path("app").path("finalStatus").asText();
} else {
//may be in job history
String jobHistoryUrl = getJobHistoryUrl(applicationId);
logger.info("jobHistoryUrl={}", jobHistoryUrl);
responseContent = HttpUtils.get(jobHistoryUrl);
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
if (!jsonObject.has("job")){
return ExecutionStatus.FAILURE;
}
result = jsonObject.path("job").path("state").asText();
}
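// map the yarn final status / job state string onto an ExecutionStatus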
switch (result) {
case Constants.ACCEPTED:
return ExecutionStatus.SUBMITTED_SUCCESS;
case Constants.SUCCEEDED:
return ExecutionStatus.SUCCESS;
case Constants.NEW:
case Constants.NEW_SAVING:
case Constants.SUBMITTED:
case Constants.FAILED:
return ExecutionStatus.FAILURE;
case Constants.KILLED:
return ExecutionStatus.KILL;
case Constants.RUNNING:
default:
return ExecutionStatus.RUNNING_EXEUTION;
}
}
/**
* get data hdfs path
*
* @return data hdfs path
*/
public static String getHdfsDataBasePath() {
if ("/".equals(resourceUploadPath)) {
// if basepath is configured to /, the generated url may be //default/resources (with extra leading /)
return "";
} else {
return resourceUploadPath;
}
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @param resourceType resource type
* @return hdfs resource dir
*/
public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
String hdfsDir = "";
if (resourceType.equals(ResourceType.FILE)) {
hdfsDir = getHdfsResDir(tenantCode);
} else if (resourceType.equals(ResourceType.UDF)) {
hdfsDir = getHdfsUdfDir(tenantCode);
}
return hdfsDir;
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @return hdfs resource dir
*/
public static String getHdfsResDir(String tenantCode) {
return String.format("%s/resources", getHdfsTenantDir(tenantCode));
}
/**
* hdfs user dir
*
* @param tenantCode tenant code
* @param userId user id
* @return hdfs resource dir
*/
public static String getHdfsUserDir(String tenantCode, int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
}
/**
* hdfs udf dir
*
* @param tenantCode tenant code
* @return get udf dir on hdfs
*/
public static String getHdfsUdfDir(String tenantCode) {
return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
}
/**
* get hdfs file name
*
* @param resourceType resource type
* @param tenantCode tenant code
* @param fileName file name
* @return hdfs file name
*/
public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName);
}
/**
* get absolute path and name for resource file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for file on hdfs
*/
public static String getHdfsResourceFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
}
/**
* get absolute path and name for udf file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for udf file on hdfs
*/
public static String getHdfsUdfFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
}
/**
* @param tenantCode tenant code
* @return file directory of tenants on hdfs
*/
public static String getHdfsTenantDir(String tenantCode) {
return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
}
/**
* getAppAddress
*
* @param appAddress app address
* @param rmHa resource manager ha
* @return app address
*/
public static String getAppAddress(String appAddress, String rmHa) {
//get active ResourceManager
String activeRM = YarnHAAdminUtils.getActiveRMName(rmHa);
String[] split1 = appAddress.split(Constants.DOUBLE_SLASH);
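// appAddress is expected to look like http://ds1:8088/ws/v1/cluster/apps/%s;
// keep the scheme, swap the host for the active resourcemanager, keep the port and path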
if (split1.length != 2) {
return null;
}
String start = split1[0] + Constants.DOUBLE_SLASH;
String[] split2 = split1[1].split(Constants.COLON);
if (split2.length != 2) {
return null;
}
String end = Constants.COLON + split2[1];
return start + activeRM + end;
}
@Override
public void close() throws IOException {
if (fs != null) {
try {
fs.close();
} catch (IOException e) {
logger.error("Close HadoopUtils instance failed", e);
throw new IOException("Close HadoopUtils instance failed", e);
}
}
}
/**
* yarn ha admin utils
*/
private static final class YarnHAAdminUtils extends RMAdminCLI {
/**
* get active resourcemanager
*
* @param rmIds comma separated resourcemanager ids
* @return the active resourcemanager id, or null if none can be determined
*/
public static String getActiveRMName(String rmIds) {
String[] rmIdArr = rmIds.split(Constants.COMMA);
int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088);
String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info";
String state = null;
try {
/**
* send http get request to rm1
*/
state = getRMState(String.format(yarnUrl, rmIdArr[0]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[0];
} else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) {
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[1];
}
} else {
return null;
}
} catch (Exception e) {
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
// rm1 was unreachable, so if rm2 reports ACTIVE it is the active one
return rmIdArr[1];
}
}
return null;
}
/**
* get ResourceManager state
*
* @param url resourcemanager cluster info url
* @return the resourcemanager HA state, or null if it cannot be determined
*/
public static String getRMState(String url) {
String retStr = HttpUtils.get(url);
if (StringUtils.isEmpty(retStr)) {
return null;
}
//to json
ObjectNode jsonObject = JSONUtils.parseObject(retStr);
//get ResourceManager state
if (!jsonObject.has("clusterInfo")){
return null;
}
return jsonObject.get("clusterInfo").path("haState").asText();
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,923 | [BUG] Hive JDBC connection parameter ignored | **Describe the bug**
The JDBC connection parameters of a Hive datasource should be appended after a question mark when building the JDBC url, like `jdbc:hive2://host:port/default?mapred.job.queue.name=root.users.a`. But actually they are appended after a semicolon, so the result is `jdbc:hive2://host:port/default;mapred.job.queue.name=root.users.a`, which causes the parameter to be ignored
![image](https://user-images.githubusercontent.com/16650282/83969066-3efbb980-a900-11ea-8c83-ffa899d7a65b.png)
As a workaround for testing, I set the parameter as `{"?mapred.job.queue.name":"root.user.a"}`, and then it is applied correctly; a hedged sketch of the separator handling follows
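For context, a sketch of the separator handling such a fix needs; the method below is illustrative and not the project's actual `BaseDataSource` API:
``` java
// Illustrative only: choose the separator by database type when appending the
// user-supplied connection parameters ("other") to the JDBC url. In a hive2 url,
// ";" introduces session variables while "?" introduces hive conf entries such
// as mapred.job.queue.name, so hive parameters must go after "?".
static String appendOther(String jdbcUrl, String other, boolean isHive) {
    if (other == null || other.isEmpty()) {
        return jdbcUrl;
    }
    String separator = isHive ? "?" : ";"; // assumption based on this report
    return jdbcUrl + separator + other;
}
```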
**Which version of Dolphin Scheduler:**
- [1.2.1-release]
| https://github.com/apache/dolphinscheduler/issues/2923 | https://github.com/apache/dolphinscheduler/pull/3194 | 6d43c21d80a3b210a7e01df9fa73d4f076698a58 | 98fdba6740abcb8dd30bc08d6ff39793dd9dd598 | "2020-06-07T12:52:13Z" | java | "2020-07-13T05:58:01Z" | dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.datasource;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbType;
/**
* data source of hive
*/
public class HiveDataSource extends BaseDataSource {
/**
* gets the JDBC driver class for the data source connection
* @return driver class name
*/
@Override
public String driverClassSelector() {
return Constants.ORG_APACHE_HIVE_JDBC_HIVE_DRIVER;
}
/**
* @return db type
*/
@Override
public DbType dbTypeSelector() {
return DbType.HIVE;
}
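// note: at this revision the user-supplied connection parameters are appended by
// the base class, which appears to use ";" as the separator (the behaviour that
// issue #2923 above reports); this class only selects the driver class and DbType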
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,923 | [BUG] Hive JDBC connection parameter ignored | **Describe the bug**
The JDBC connection parameters of a Hive datasource should be appended after a question mark when building the JDBC url, like `jdbc:hive2://host:port/default?mapred.job.queue.name=root.users.a`. But actually they are appended after a semicolon, so the result is `jdbc:hive2://host:port/default;mapred.job.queue.name=root.users.a`, which causes the parameter to be ignored
![image](https://user-images.githubusercontent.com/16650282/83969066-3efbb980-a900-11ea-8c83-ffa899d7a65b.png)
As a workaround for testing, I set the parameter as `{"?mapred.job.queue.name":"root.user.a"}`, and then it is applied correctly
**Which version of Dolphin Scheduler:**
- [1.2.1-release]
| https://github.com/apache/dolphinscheduler/issues/2923 | https://github.com/apache/dolphinscheduler/pull/3194 | 6d43c21d80a3b210a7e01df9fa73d4f076698a58 | 98fdba6740abcb8dd30bc08d6ff39793dd9dd598 | "2020-06-07T12:52:13Z" | java | "2020-07-13T05:58:01Z" | dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/datasource/HiveDataSourceTest.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,923 | [BUG] Hive JDBC connection parameter ignored | **Describe the bug**
The JDBC connection parameters of a Hive datasource should be appended after a question mark when building the JDBC url, like `jdbc:hive2://host:port/default?mapred.job.queue.name=root.users.a`. But actually they are appended after a semicolon, so the result is `jdbc:hive2://host:port/default;mapred.job.queue.name=root.users.a`, which causes the parameter to be ignored
![image](https://user-images.githubusercontent.com/16650282/83969066-3efbb980-a900-11ea-8c83-ffa899d7a65b.png)
As a workaround for testing, I set the parameter as `{"?mapred.job.queue.name":"root.user.a"}`, and then it is applied correctly
**Which version of Dolphin Scheduler:**
- [1.2.1-release]
| https://github.com/apache/dolphinscheduler/issues/2923 | https://github.com/apache/dolphinscheduler/pull/3194 | 6d43c21d80a3b210a7e01df9fa73d4f076698a58 | 98fdba6740abcb8dd30bc08d6ff39793dd9dd598 | "2020-06-07T12:52:13Z" | java | "2020-07-13T05:58:01Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.0-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing and making the scheduling system available out of the box for data processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.5.RELEASE</spring.version>
<spring.boot.version>2.1.3.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.2.3</quartz.version>
<jackson.version>2.9.8</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.14</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.6</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>20.0</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<jsp-2.1.version>6.1.14</jsp-2.1.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<apache.rat.version>0.13</apache.rat.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
<rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<guava-retry.version>2.0.0</guava-retry.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-plugin-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
                    <groupId>com.sun.jersey</groupId>
                    <artifactId>jersey-json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<!-- for api module -->
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>${jsp-2.1.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<version>${rpm-maven-plugion.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<includes>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/template/AlertTemplateFactoryTest.java</include>
<include>**/alert/template/impl/DefaultHTMLTemplateTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/ExcelUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/utils/JSONUtilsTest.java</include>
<!--<include>**/alert/utils/MailUtilsTest.java</include>-->
<include>**/alert/plugin/EmailAlertPluginTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
                        <include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/PasswordAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<!--<include>**/api/service/DataSourceServiceTest.java</include>-->
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/process/ProcessBuilderForWin32Test.java</include>
<include>**/common/utils/process/ProcessEnvironmentForWin32Test.java</include>
<include>**/common/utils/process/ProcessImplForWin32Test.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/HadoopUtils.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<!--<include>**/remote/NettyRemotingClientTest.java</include>-->
<include>**/remote/ResponseFutureTest.java</include>
<!--<include>**/server/log/LoggerServerTest.java</include>-->
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<!--<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>-->
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<!--<include>**/server/master/register/MasterRegistryTest.java</include>-->
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<!--<include>**/server/master/ConditionsTaskTest.java</include>-->
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<!--<include>**/server/register/ZookeeperNodeManagerTest.java</include>-->
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<!--<include>**/server/utils/FlinkArgsUtilsTest.java</include>-->
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<!--<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>-->
<!--<include>**/server/worker/registry/WorkerRegistryTest.java</include>-->
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
                        <!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserAlertGroupMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/model/AlertDataTest.java</include>
<include>**/plugin/model/AlertInfoTest.java</include>
<include>**/plugin/utils/PropertyUtilsTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache.rat.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<addDefaultLicenseMatchers>false</addDefaultLicenseMatchers>
<licenses>
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL20</licenseFamilyCategory>
<licenseFamilyName>Apache License, 2.0</licenseFamilyName>
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF)</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License, 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>**/node_modules/**</exclude>
<exclude>**/node/**</exclude>
<exclude>**/dist/**</exclude>
<exclude>**/licenses/**</exclude>
<exclude>.github/**</exclude>
<exclude>sql/soft_version</exclude>
<exclude>**/common/utils/ScriptRunner.java</exclude>
<exclude>**/*.json</exclude>
<!-- document files -->
<exclude>**/*.md</exclude>
                        <exclude>**/*.MD</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/docs/**</exclude>
<exclude>**/*.babelrc</exclude>
<exclude>**/*.eslintrc</exclude>
<exclude>**/.mvn/jvm.config</exclude>
<exclude>**/.mvn/wrapper/**</exclude>
</excludes>
<consoleOutput>true</consoleOutput>
</configuration>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-plugin-api</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,187 | [BUG] Heartbeat thread pool does not shutdown when MasterRegistry unRegistry | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
Look at the `unRegistry` method of MasterRegistry:
```java
public void unRegistry() {
String address = getLocalAddress();
String localNodePath = getMasterPath();
zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath);
logger.info("master node : {} unRegistry to ZK.", address);
}
```
The method, which is invoked when closing the MasterServer, does not shut down the heartbeat thread pool. | https://github.com/apache/dolphinscheduler/issues/3187 | https://github.com/apache/dolphinscheduler/pull/3188 | 98fdba6740abcb8dd30bc08d6ff39793dd9dd598 | 88f9bed726d479d4be4a46fb599161d5e61f496f | "2020-07-10T17:28:52Z" | java | "2020-07-13T06:27:38Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.registry;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import static org.apache.dolphinscheduler.remote.utils.Constants.COMMA;
/**
* master registry
*/
@Service
public class MasterRegistry {
private final Logger logger = LoggerFactory.getLogger(MasterRegistry.class);
/**
* zookeeper registry center
*/
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
/**
* master config
*/
@Autowired
private MasterConfig masterConfig;
/**
* heartbeat executor
*/
private ScheduledExecutorService heartBeatExecutor;
/**
     * master start time
*/
private String startTime;
@PostConstruct
public void init(){
this.startTime = DateUtils.dateToString(new Date());
this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor"));
}
/**
* registry
*/
public void registry() {
String address = NetUtils.getHost();
String localNodePath = getMasterPath();
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
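        // note: the node just created is ephemeral, so ZooKeeper drops it if the session is lost;
        // the listener registered below re-creates it once the connection state returns to RECONNECTED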
zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() {
@Override
public void stateChanged(CuratorFramework client, ConnectionState newState) {
if(newState == ConnectionState.LOST){
logger.error("master : {} connection lost from zookeeper", address);
} else if(newState == ConnectionState.RECONNECTED){
logger.info("master : {} reconnected to zookeeper", address);
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(localNodePath, "");
} else if(newState == ConnectionState.SUSPENDED){
logger.warn("master : {} connection SUSPENDED ", address);
}
}
});
int masterHeartbeatInterval = masterConfig.getMasterHeartbeatInterval();
HeartBeatTask heartBeatTask = new HeartBeatTask(startTime,
masterConfig.getMasterReservedMemory(),
masterConfig.getMasterMaxCpuloadAvg(),
getMasterPath(),
zookeeperRegistryCenter);
this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, masterHeartbeatInterval, masterHeartbeatInterval, TimeUnit.SECONDS);
logger.info("master node : {} registry to ZK successfully with heartBeatInterval : {}s", address, masterHeartbeatInterval);
}
/**
* remove registry info
*/
public void unRegistry() {
String address = getLocalAddress();
String localNodePath = getMasterPath();
zookeeperRegistryCenter.getZookeeperCachedOperator().remove(localNodePath);
logger.info("master node : {} unRegistry to ZK.", address);
}
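
    // The issue body above reports that unRegistry() leaves the heartbeat thread pool running.
    // A minimal sketch of a fix (hypothetical here; the actual change landed via PR #3188 and
    // may differ) would also stop the scheduled executor when the node is removed:
    //
    //     if (heartBeatExecutor != null) {
    //         heartBeatExecutor.shutdownNow();
    //     }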
/**
* get master path
     * @return master registry path in zookeeper
*/
private String getMasterPath() {
String address = getLocalAddress();
String localNodePath = this.zookeeperRegistryCenter.getMasterPath() + "/" + address;
return localNodePath;
}
/**
* get local address
     * @return local address in host:port form
*/
private String getLocalAddress(){
return NetUtils.getHost() + ":" + masterConfig.getListenPort();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,574 | [Feature]Configurable registration ipaddress | **Is your feature request related to a problem? Please describe.**
On macOS, OSUtils.getHost() returns different IPs at runtime. Also, the method OSUtils.getHost() doesn't
perform well in container environments like Docker, and errors may happen.
**Describe the solution you'd like**
provide a param like register-ip to specify the IP which will be registered on ZK
**Describe alternatives you've considered**
provide a param like networkinterface to specify the network interface whose address will be registered on ZK
**Additional context**
<img width="1467" alt="8C1E5A5B-CC64-40FE-9049-5FC6154E72FE" src="https://user-images.githubusercontent.com/42403258/80572245-80ab6180-8a30-11ea-9525-73b33f1bf49e.png">
<img width="1481" alt="25DECE0A-2A92-479F-A76F-7DAEB8F691FF" src="https://user-images.githubusercontent.com/42403258/80572256-8608ac00-8a30-11ea-8784-eeb88c0fb7ea.png">
My English is not good; simply put, allow specifying the IP registered on ZK, or an optional network interface configuration. | https://github.com/apache/dolphinscheduler/issues/2574 | https://github.com/apache/dolphinscheduler/pull/3186 | 6c9ac84f73a20717ab1014fe8c09e98668262baa | 6f9970b189d71d043e79200a70a95f2f33ad10f4 | "2020-04-29T07:48:39Z" | java | "2020-07-13T10:51:38Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import java.util.regex.Pattern;
/**
* Constants
*/
public final class Constants {
private Constants() {
throw new IllegalStateException("Constants class");
}
/**
* quartz config
*/
public static final String ORG_QUARTZ_JOBSTORE_DRIVERDELEGATECLASS = "org.quartz.jobStore.driverDelegateClass";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCENAME = "org.quartz.scheduler.instanceName";
public static final String ORG_QUARTZ_SCHEDULER_INSTANCEID = "org.quartz.scheduler.instanceId";
public static final String ORG_QUARTZ_SCHEDULER_MAKESCHEDULERTHREADDAEMON = "org.quartz.scheduler.makeSchedulerThreadDaemon";
public static final String ORG_QUARTZ_JOBSTORE_USEPROPERTIES = "org.quartz.jobStore.useProperties";
public static final String ORG_QUARTZ_THREADPOOL_CLASS = "org.quartz.threadPool.class";
public static final String ORG_QUARTZ_THREADPOOL_THREADCOUNT = "org.quartz.threadPool.threadCount";
public static final String ORG_QUARTZ_THREADPOOL_MAKETHREADSDAEMONS = "org.quartz.threadPool.makeThreadsDaemons";
public static final String ORG_QUARTZ_THREADPOOL_THREADPRIORITY = "org.quartz.threadPool.threadPriority";
public static final String ORG_QUARTZ_JOBSTORE_CLASS = "org.quartz.jobStore.class";
public static final String ORG_QUARTZ_JOBSTORE_TABLEPREFIX = "org.quartz.jobStore.tablePrefix";
public static final String ORG_QUARTZ_JOBSTORE_ISCLUSTERED = "org.quartz.jobStore.isClustered";
public static final String ORG_QUARTZ_JOBSTORE_MISFIRETHRESHOLD = "org.quartz.jobStore.misfireThreshold";
public static final String ORG_QUARTZ_JOBSTORE_CLUSTERCHECKININTERVAL = "org.quartz.jobStore.clusterCheckinInterval";
public static final String ORG_QUARTZ_JOBSTORE_ACQUIRETRIGGERSWITHINLOCK = "org.quartz.jobStore.acquireTriggersWithinLock";
public static final String ORG_QUARTZ_JOBSTORE_DATASOURCE = "org.quartz.jobStore.dataSource";
public static final String ORG_QUARTZ_DATASOURCE_MYDS_CONNECTIONPROVIDER_CLASS = "org.quartz.dataSource.myDs.connectionProvider.class";
/**
* quartz config default value
*/
public static final String QUARTZ_TABLE_PREFIX = "QRTZ_";
public static final String QUARTZ_MISFIRETHRESHOLD = "60000";
public static final String QUARTZ_CLUSTERCHECKININTERVAL = "5000";
public static final String QUARTZ_DATASOURCE = "myDs";
public static final String QUARTZ_THREADCOUNT = "25";
public static final String QUARTZ_THREADPRIORITY = "5";
public static final String QUARTZ_INSTANCENAME = "DolphinScheduler";
public static final String QUARTZ_INSTANCEID = "AUTO";
public static final String QUARTZ_ACQUIRETRIGGERSWITHINLOCK = "true";
/**
* common properties path
*/
public static final String COMMON_PROPERTIES_PATH = "/common.properties";
/**
* fs.defaultFS
*/
public static final String FS_DEFAULTFS = "fs.defaultFS";
/**
* fs s3a endpoint
*/
public static final String FS_S3A_ENDPOINT = "fs.s3a.endpoint";
/**
* fs s3a access key
*/
public static final String FS_S3A_ACCESS_KEY = "fs.s3a.access.key";
/**
* fs s3a secret key
*/
public static final String FS_S3A_SECRET_KEY = "fs.s3a.secret.key";
/**
* yarn.resourcemanager.ha.rm.ids
*/
public static final String YARN_RESOURCEMANAGER_HA_RM_IDS = "yarn.resourcemanager.ha.rm.ids";
public static final String YARN_RESOURCEMANAGER_HA_XX = "xx";
/**
* yarn.application.status.address
*/
public static final String YARN_APPLICATION_STATUS_ADDRESS = "yarn.application.status.address";
/**
* yarn.job.history.status.address
*/
public static final String YARN_JOB_HISTORY_STATUS_ADDRESS = "yarn.job.history.status.address";
/**
* hdfs configuration
* hdfs.root.user
*/
public static final String HDFS_ROOT_USER = "hdfs.root.user";
/**
* hdfs/s3 configuration
* resource.upload.path
*/
public static final String RESOURCE_UPLOAD_PATH = "resource.upload.path";
/**
* data basedir path
*/
public static final String DATA_BASEDIR_PATH = "data.basedir.path";
/**
* dolphinscheduler.env.path
*/
public static final String DOLPHINSCHEDULER_ENV_PATH = "dolphinscheduler.env.path";
/**
* environment properties default path
*/
public static final String ENV_PATH = "env/dolphinscheduler_env.sh";
/**
* python home
*/
public static final String PYTHON_HOME="PYTHON_HOME";
/**
* resource.view.suffixs
*/
public static final String RESOURCE_VIEW_SUFFIXS = "resource.view.suffixs";
public static final String RESOURCE_VIEW_SUFFIXS_DEFAULT_VALUE = "txt,log,sh,conf,cfg,py,java,sql,hql,xml,properties";
/**
* development.state
*/
public static final String DEVELOPMENT_STATE = "development.state";
public static final String DEVELOPMENT_STATE_DEFAULT_VALUE = "true";
/**
* string true
*/
public static final String STRING_TRUE = "true";
/**
* string false
*/
public static final String STRING_FALSE = "false";
/**
* resource storage type
*/
public static final String RESOURCE_STORAGE_TYPE = "resource.storage.type";
/**
* MasterServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_MASTERS = "/nodes/master";
/**
* WorkerServer directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_WORKERS = "/nodes/worker";
/**
* all servers directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_DEAD_SERVERS = "/dead-servers";
/**
* MasterServer lock directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_MASTERS = "/lock/masters";
/**
* MasterServer failover directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_MASTERS = "/lock/failover/masters";
/**
* WorkerServer failover directory registered in zookeeper
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_WORKERS = "/lock/failover/workers";
/**
     * MasterServer startup failover running and fault tolerance process
*/
public static final String ZOOKEEPER_DOLPHINSCHEDULER_LOCK_FAILOVER_STARTUP_MASTERS = "/lock/failover/startup-masters";
/**
* comma ,
*/
public static final String COMMA = ",";
/**
* slash /
*/
public static final String SLASH = "/";
/**
* COLON :
*/
public static final String COLON = ":";
/**
* SINGLE_SLASH /
*/
public static final String SINGLE_SLASH = "/";
/**
* DOUBLE_SLASH //
*/
public static final String DOUBLE_SLASH = "//";
/**
* SEMICOLON ;
*/
public static final String SEMICOLON = ";";
/**
* EQUAL SIGN
*/
public static final String EQUAL_SIGN = "=";
/**
* AT SIGN
*/
public static final String AT_SIGN = "@";
public static final String WORKER_MAX_CPULOAD_AVG = "worker.max.cpuload.avg";
public static final String WORKER_RESERVED_MEMORY = "worker.reserved.memory";
public static final String MASTER_MAX_CPULOAD_AVG = "master.max.cpuload.avg";
public static final String MASTER_RESERVED_MEMORY = "master.reserved.memory";
/**
* date format of yyyy-MM-dd HH:mm:ss
*/
public static final String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
/**
* date format of yyyyMMddHHmmss
*/
public static final String YYYYMMDDHHMMSS = "yyyyMMddHHmmss";
/**
* http connect time out
*/
public static final int HTTP_CONNECT_TIMEOUT = 60 * 1000;
/**
* http connect request time out
*/
public static final int HTTP_CONNECTION_REQUEST_TIMEOUT = 60 * 1000;
/**
     * httpclient socket timeout
*/
public static final int SOCKET_TIMEOUT = 60 * 1000;
/**
* http header
*/
public static final String HTTP_HEADER_UNKNOWN = "unKnown";
/**
* http X-Forwarded-For
*/
public static final String HTTP_X_FORWARDED_FOR = "X-Forwarded-For";
/**
* http X-Real-IP
*/
public static final String HTTP_X_REAL_IP = "X-Real-IP";
/**
* UTF-8
*/
public static final String UTF_8 = "UTF-8";
/**
* user name regex
*/
public static final Pattern REGEX_USER_NAME = Pattern.compile("^[a-zA-Z0-9._-]{3,39}$");
/**
* email regex
*/
public static final Pattern REGEX_MAIL_NAME = Pattern.compile("^([a-z0-9A-Z]+[_|\\-|\\.]?)+[a-z0-9A-Z]@([a-z0-9A-Z]+(-[a-z0-9A-Z]+)?\\.)+[a-zA-Z]{2,}$");
/**
* read permission
*/
public static final int READ_PERMISSION = 2 * 1;
/**
* write permission
*/
public static final int WRITE_PERMISSION = 2 * 2;
/**
* execute permission
*/
public static final int EXECUTE_PERMISSION = 1;
/**
* default admin permission
*/
public static final int DEFAULT_ADMIN_PERMISSION = 7;
/**
* all permissions
*/
public static final int ALL_PERMISSIONS = READ_PERMISSION | WRITE_PERMISSION | EXECUTE_PERMISSION;
/**
* max task timeout
*/
public static final int MAX_TASK_TIMEOUT = 24 * 3600;
/**
* master cpu load
*/
public static final int DEFAULT_MASTER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;
/**
* master reserved memory
*/
public static final double DEFAULT_MASTER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* worker cpu load
*/
public static final int DEFAULT_WORKER_CPU_LOAD = Runtime.getRuntime().availableProcessors() * 2;
/**
* worker reserved memory
*/
public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
     * default log cache rows num, output when the number is reached
*/
public static final int DEFAULT_LOG_ROWS_NUM = 4 * 16;
/**
     * log flush interval, output when the interval is reached
*/
public static final int DEFAULT_LOG_FLUSH_INTERVAL = 1000;
/**
     * time unit second to minutes
*/
public static final int SEC_2_MINUTES_TIME_UNIT = 60;
    /**
     * rpc port
     */
public static final int RPC_PORT = 50051;
/**
* forbid running task
*/
public static final String FLOWNODE_RUN_FLAG_FORBIDDEN = "FORBIDDEN";
/**
* datasource configuration path
*/
public static final String DATASOURCE_PROPERTIES = "/datasource.properties";
public static final String TASK_RECORD_URL = "task.record.datasource.url";
public static final String TASK_RECORD_FLAG = "task.record.flag";
public static final String TASK_RECORD_USER = "task.record.datasource.username";
public static final String TASK_RECORD_PWD = "task.record.datasource.password";
public static final String DEFAULT = "Default";
public static final String USER = "user";
public static final String PASSWORD = "password";
public static final String XXXXXX = "******";
public static final String NULL = "NULL";
public static final String THREAD_NAME_MASTER_SERVER = "Master-Server";
public static final String THREAD_NAME_WORKER_SERVER = "Worker-Server";
public static final String TASK_RECORD_TABLE_HIVE_LOG = "eamp_hive_log_hd";
public static final String TASK_RECORD_TABLE_HISTORY_HIVE_LOG = "eamp_hive_hist_log_hd";
/**
* command parameter keys
*/
public static final String CMDPARAM_RECOVER_PROCESS_ID_STRING = "ProcessInstanceId";
public static final String CMDPARAM_RECOVERY_START_NODE_STRING = "StartNodeIdList";
public static final String CMDPARAM_RECOVERY_WAITTING_THREAD = "WaittingThreadInstanceId";
public static final String CMDPARAM_SUB_PROCESS = "processInstanceId";
public static final String CMDPARAM_EMPTY_SUB_PROCESS = "0";
public static final String CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID = "parentProcessInstanceId";
public static final String CMDPARAM_SUB_PROCESS_DEFINE_ID = "processDefinitionId";
public static final String CMDPARAM_START_NODE_NAMES = "StartNodeNameList";
/**
* complement data start date
*/
public static final String CMDPARAM_COMPLEMENT_DATA_START_DATE = "complementStartDate";
/**
* complement data end date
*/
public static final String CMDPARAM_COMPLEMENT_DATA_END_DATE = "complementEndDate";
/**
* hadoop configuration
*/
public static final String HADOOP_RM_STATE_ACTIVE = "ACTIVE";
public static final String HADOOP_RM_STATE_STANDBY = "STANDBY";
public static final String HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT = "resource.manager.httpaddress.port";
/**
* data source config
*/
public static final String SPRING_DATASOURCE_DRIVER_CLASS_NAME = "spring.datasource.driver-class-name";
public static final String SPRING_DATASOURCE_URL = "spring.datasource.url";
public static final String SPRING_DATASOURCE_USERNAME = "spring.datasource.username";
public static final String SPRING_DATASOURCE_PASSWORD = "spring.datasource.password";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY_TIMEOUT = "spring.datasource.validationQueryTimeout";
public static final String SPRING_DATASOURCE_INITIAL_SIZE = "spring.datasource.initialSize";
public static final String SPRING_DATASOURCE_MIN_IDLE = "spring.datasource.minIdle";
public static final String SPRING_DATASOURCE_MAX_ACTIVE = "spring.datasource.maxActive";
public static final String SPRING_DATASOURCE_MAX_WAIT = "spring.datasource.maxWait";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_EVICTION_RUNS_MILLIS = "spring.datasource.timeBetweenEvictionRunsMillis";
public static final String SPRING_DATASOURCE_TIME_BETWEEN_CONNECT_ERROR_MILLIS = "spring.datasource.timeBetweenConnectErrorMillis";
public static final String SPRING_DATASOURCE_MIN_EVICTABLE_IDLE_TIME_MILLIS = "spring.datasource.minEvictableIdleTimeMillis";
public static final String SPRING_DATASOURCE_VALIDATION_QUERY = "spring.datasource.validationQuery";
public static final String SPRING_DATASOURCE_TEST_WHILE_IDLE = "spring.datasource.testWhileIdle";
public static final String SPRING_DATASOURCE_TEST_ON_BORROW = "spring.datasource.testOnBorrow";
public static final String SPRING_DATASOURCE_TEST_ON_RETURN = "spring.datasource.testOnReturn";
public static final String SPRING_DATASOURCE_POOL_PREPARED_STATEMENTS = "spring.datasource.poolPreparedStatements";
public static final String SPRING_DATASOURCE_DEFAULT_AUTO_COMMIT = "spring.datasource.defaultAutoCommit";
public static final String SPRING_DATASOURCE_KEEP_ALIVE = "spring.datasource.keepAlive";
public static final String SPRING_DATASOURCE_MAX_POOL_PREPARED_STATEMENT_PER_CONNECTION_SIZE = "spring.datasource.maxPoolPreparedStatementPerConnectionSize";
public static final String DEVELOPMENT = "development";
public static final String QUARTZ_PROPERTIES_PATH = "quartz.properties";
/**
* sleep time
*/
public static final int SLEEP_TIME_MILLIS = 1000;
/**
* heartbeat for zk info length
*/
public static final int HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH = 10;
/**
* hadoop params constant
*/
/**
* jar
*/
public static final String JAR = "jar";
/**
* hadoop
*/
public static final String HADOOP = "hadoop";
/**
* -D parameter
*/
public static final String D = "-D";
/**
* -D mapreduce.job.queuename=ququename
*/
public static final String MR_QUEUE = "mapreduce.job.queuename";
/**
* spark params constant
*/
public static final String MASTER = "--master";
public static final String DEPLOY_MODE = "--deploy-mode";
/**
* --class CLASS_NAME
*/
public static final String MAIN_CLASS = "--class";
/**
* --driver-cores NUM
*/
public static final String DRIVER_CORES = "--driver-cores";
/**
* --driver-memory MEM
*/
public static final String DRIVER_MEMORY = "--driver-memory";
/**
* --num-executors NUM
*/
public static final String NUM_EXECUTORS = "--num-executors";
/**
* --executor-cores NUM
*/
public static final String EXECUTOR_CORES = "--executor-cores";
/**
* --executor-memory MEM
*/
public static final String EXECUTOR_MEMORY = "--executor-memory";
/**
* --queue QUEUE
*/
public static final String SPARK_QUEUE = "--queue";
/**
* --queue --qu
*/
public static final String FLINK_QUEUE = "--qu";
/**
* exit code success
*/
public static final int EXIT_CODE_SUCCESS = 0;
/**
* exit code kill
*/
public static final int EXIT_CODE_KILL = 137;
/**
* exit code failure
*/
public static final int EXIT_CODE_FAILURE = -1;
/**
* date format of yyyyMMdd
*/
public static final String PARAMETER_FORMAT_DATE = "yyyyMMdd";
/**
* date format of yyyyMMddHHmmss
*/
public static final String PARAMETER_FORMAT_TIME = "yyyyMMddHHmmss";
/**
* system date(yyyyMMddHHmmss)
*/
public static final String PARAMETER_DATETIME = "system.datetime";
/**
* system date(yyyymmdd) today
*/
public static final String PARAMETER_CURRENT_DATE = "system.biz.curdate";
/**
* system date(yyyymmdd) yesterday
*/
public static final String PARAMETER_BUSINESS_DATE = "system.biz.date";
/**
* ACCEPTED
*/
public static final String ACCEPTED = "ACCEPTED";
/**
* SUCCEEDED
*/
public static final String SUCCEEDED = "SUCCEEDED";
/**
* NEW
*/
public static final String NEW = "NEW";
/**
* NEW_SAVING
*/
public static final String NEW_SAVING = "NEW_SAVING";
/**
* SUBMITTED
*/
public static final String SUBMITTED = "SUBMITTED";
/**
* FAILED
*/
public static final String FAILED = "FAILED";
/**
* KILLED
*/
public static final String KILLED = "KILLED";
/**
* RUNNING
*/
public static final String RUNNING = "RUNNING";
/**
* underline "_"
*/
public static final String UNDERLINE = "_";
/**
     * quartz job prefix
*/
public static final String QUARTZ_JOB_PRIFIX = "job";
/**
     * quartz job group prefix
*/
public static final String QUARTZ_JOB_GROUP_PRIFIX = "jobgroup";
/**
* projectId
*/
public static final String PROJECT_ID = "projectId";
/**
* processId
*/
public static final String SCHEDULE_ID = "scheduleId";
/**
* schedule
*/
public static final String SCHEDULE = "schedule";
/**
* application regex
*/
public static final String APPLICATION_REGEX = "application_\\d+_\\d+";
public static final String PID = OSUtils.isWindows() ? "handle" : "pid";
/**
* month_begin
*/
public static final String MONTH_BEGIN = "month_begin";
/**
* add_months
*/
public static final String ADD_MONTHS = "add_months";
/**
* month_end
*/
public static final String MONTH_END = "month_end";
/**
* week_begin
*/
public static final String WEEK_BEGIN = "week_begin";
/**
* week_end
*/
public static final String WEEK_END = "week_end";
/**
* timestamp
*/
public static final String TIMESTAMP = "timestamp";
public static final char SUBTRACT_CHAR = '-';
public static final char ADD_CHAR = '+';
public static final char MULTIPLY_CHAR = '*';
public static final char DIVISION_CHAR = '/';
public static final char LEFT_BRACE_CHAR = '(';
public static final char RIGHT_BRACE_CHAR = ')';
public static final String ADD_STRING = "+";
public static final String MULTIPLY_STRING = "*";
public static final String DIVISION_STRING = "/";
public static final String LEFT_BRACE_STRING = "(";
public static final char P = 'P';
public static final char N = 'N';
public static final String SUBTRACT_STRING = "-";
public static final String GLOBAL_PARAMS = "globalParams";
public static final String LOCAL_PARAMS = "localParams";
public static final String PROCESS_INSTANCE_STATE = "processInstanceState";
public static final String TASK_LIST = "taskList";
public static final String RWXR_XR_X = "rwxr-xr-x";
/**
* master/worker server use for zk
*/
public static final String MASTER_PREFIX = "master";
public static final String WORKER_PREFIX = "worker";
public static final String DELETE_ZK_OP = "delete";
public static final String ADD_ZK_OP = "add";
public static final String ALIAS = "alias";
public static final String CONTENT = "content";
public static final String DEPENDENT_SPLIT = ":||";
public static final String DEPENDENT_ALL = "ALL";
/**
* preview schedule execute count
*/
public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5;
/**
* kerberos
*/
public static final String KERBEROS = "kerberos";
/**
* kerberos expire time
*/
public static final String KERBEROS_EXPIRE_TIME = "kerberos.expire.time";
/**
* java.security.krb5.conf
*/
public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
/**
* java.security.krb5.conf.path
*/
public static final String JAVA_SECURITY_KRB5_CONF_PATH = "java.security.krb5.conf.path";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
/**
* hadoop.security.authentication
*/
public static final String HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE = "hadoop.security.authentication.startup.state";
/**
* com.amazonaws.services.s3.enableV4
*/
public static final String AWS_S3_V4 = "com.amazonaws.services.s3.enableV4";
/**
* loginUserFromKeytab user
*/
public static final String LOGIN_USER_KEY_TAB_USERNAME = "login.user.keytab.username";
/**
* default worker group id
*/
public static final int DEFAULT_WORKER_ID = -1;
/**
* loginUserFromKeytab path
*/
public static final String LOGIN_USER_KEY_TAB_PATH = "login.user.keytab.path";
/**
* task log info format
*/
public static final String TASK_LOG_INFO_FORMAT = "TaskLogInfo-%s";
/**
* hive conf
*/
public static final String HIVE_CONF = "hiveconf:";
    // flink params
public static final String FLINK_YARN_CLUSTER = "yarn-cluster";
public static final String FLINK_RUN_MODE = "-m";
public static final String FLINK_YARN_SLOT = "-ys";
public static final String FLINK_APP_NAME = "-ynm";
public static final String FLINK_TASK_MANAGE = "-yn";
public static final String FLINK_JOB_MANAGE_MEM = "-yjm";
public static final String FLINK_TASK_MANAGE_MEM = "-ytm";
public static final String FLINK_DETACH = "-d";
public static final String FLINK_MAIN_CLASS = "-c";
public static final int[] NOT_TERMINATED_STATES = new int[]{
ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal(),
ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
ExecutionStatus.WAITTING_THREAD.ordinal(),
ExecutionStatus.WAITTING_DEPEND.ordinal()
};
/**
* status
*/
public static final String STATUS = "status";
/**
* message
*/
public static final String MSG = "msg";
/**
* data total
*/
public static final String COUNT = "count";
/**
* page size
*/
public static final String PAGE_SIZE = "pageSize";
/**
* current page no
*/
public static final String PAGE_NUMBER = "pageNo";
/**
*
*/
public static final String DATA_LIST = "data";
public static final String TOTAL_LIST = "totalList";
public static final String CURRENT_PAGE = "currentPage";
public static final String TOTAL_PAGE = "totalPage";
public static final String TOTAL = "total";
/**
* session user
*/
public static final String SESSION_USER = "session.user";
public static final String SESSION_ID = "sessionId";
public static final String PASSWORD_DEFAULT = "******";
/**
* driver
*/
public static final String ORG_POSTGRESQL_DRIVER = "org.postgresql.Driver";
public static final String COM_MYSQL_JDBC_DRIVER = "com.mysql.jdbc.Driver";
public static final String ORG_APACHE_HIVE_JDBC_HIVE_DRIVER = "org.apache.hive.jdbc.HiveDriver";
public static final String COM_CLICKHOUSE_JDBC_DRIVER = "ru.yandex.clickhouse.ClickHouseDriver";
public static final String COM_ORACLE_JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
public static final String COM_SQLSERVER_JDBC_DRIVER = "com.microsoft.sqlserver.jdbc.SQLServerDriver";
public static final String COM_DB2_JDBC_DRIVER = "com.ibm.db2.jcc.DB2Driver";
/**
* database type
*/
public static final String MYSQL = "MYSQL";
public static final String POSTGRESQL = "POSTGRESQL";
public static final String HIVE = "HIVE";
public static final String SPARK = "SPARK";
public static final String CLICKHOUSE = "CLICKHOUSE";
public static final String ORACLE = "ORACLE";
public static final String SQLSERVER = "SQLSERVER";
public static final String DB2 = "DB2";
/**
* jdbc url
*/
public static final String JDBC_MYSQL = "jdbc:mysql://";
public static final String JDBC_POSTGRESQL = "jdbc:postgresql://";
public static final String JDBC_HIVE_2 = "jdbc:hive2://";
public static final String JDBC_CLICKHOUSE = "jdbc:clickhouse://";
public static final String JDBC_ORACLE_SID = "jdbc:oracle:thin:@";
public static final String JDBC_ORACLE_SERVICE_NAME = "jdbc:oracle:thin:@//";
public static final String JDBC_SQLSERVER = "jdbc:sqlserver://";
public static final String JDBC_DB2 = "jdbc:db2://";
public static final String ADDRESS = "address";
public static final String DATABASE = "database";
public static final String JDBC_URL = "jdbcUrl";
public static final String PRINCIPAL = "principal";
public static final String OTHER = "other";
public static final String ORACLE_DB_CONNECT_TYPE = "connectType";
/**
* session timeout
*/
public static final int SESSION_TIME_OUT = 7200;
public static final int MAX_FILE_SIZE = 1024 * 1024 * 1024;
public static final String UDF = "UDF";
public static final String CLASS = "class";
public static final String RECEIVERS = "receivers";
public static final String RECEIVERS_CC = "receiversCc";
/**
* dataSource sensitive param
*/
public static final String DATASOURCE_PASSWORD_REGEX = "(?<=(\"password\":\")).*?(?=(\"))";
/**
* default worker group
*/
public static final String DEFAULT_WORKER_GROUP = "default";
public static final Integer TASK_INFO_LENGTH = 5;
    /**
     * schedule time
     */
public static final String PARAMETER_SHECDULE_TIME = "schedule.time";
/**
* authorize writable perm
*/
public static final int AUTHORIZE_WRITABLE_PERM=7;
/**
* authorize readable perm
*/
public static final int AUTHORIZE_READABLE_PERM=4;
/**
* plugin configurations
*/
public static final String PLUGIN_JAR_SUFFIX = ".jar";
public static final int NORAML_NODE_STATUS = 0;
public static final int ABNORMAL_NODE_STATUS = 1;
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,574 | [Feature]Configurable registration ipaddress | **Is your feature request related to a problem? Please describe.**
On macOS, OSUtils.getHost() can return a different IP at runtime. The method also does not
behave well in containerized environments such as Docker, where errors may occur.
**Describe the solution you'd like**
Provide a parameter such as register-ip to specify the IP that will be registered on ZooKeeper.
**Describe alternatives you've considered**
Provide a parameter such as networkinterface to specify the network interface whose address will be registered on ZooKeeper.
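A minimal sketch of what the first option could look like inside the host-lookup utility (the property name `dolphinscheduler.register.ip` is invented here for illustration; it is not an existing option):

```java
// Hypothetical sketch: prefer an explicitly configured registration IP over
// auto-detection; fall back to the current lookup when nothing is configured.
public static String getHost() {
    String configuredIp = System.getProperty("dolphinscheduler.register.ip");
    if (configuredIp != null && !configuredIp.isEmpty()) {
        return configuredIp; // trust the operator's explicit choice
    }
    InetAddress address = getLocalAddress(); // existing auto-detection path
    return address != null ? address.getHostAddress() : "127.0.0.1";
}
```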
**Additional context**
<img width="1467" alt="8C1E5A5B-CC64-40FE-9049-5FC6154E72FE" src="https://user-images.githubusercontent.com/42403258/80572245-80ab6180-8a30-11ea-9525-73b33f1bf49e.png">
<img width="1481" alt="25DECE0A-2A92-479F-A76F-7DAEB8F691FF" src="https://user-images.githubusercontent.com/42403258/80572256-8608ac00-8a30-11ea-8784-eeb88c0fb7ea.png">
My English is not good; in short, the request is to be able to specify the IP registered on ZooKeeper, or optionally which network interface to use. | https://github.com/apache/dolphinscheduler/issues/2574 | https://github.com/apache/dolphinscheduler/pull/3186 | 6c9ac84f73a20717ab1014fe8c09e98668262baa | 6f9970b189d71d043e79200a70a95f2f33ad10f4 | "2020-04-29T07:48:39Z" | java | "2020-07-13T10:51:38Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.*;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.regex.Pattern;
import static java.util.Collections.emptyList;
/**
* NetUtils
*/
public class NetUtils {
private NetUtils() {
throw new IllegalStateException("Utility class");
}
private static Logger logger = LoggerFactory.getLogger(NetUtils.class);
private static final Pattern IP_PATTERN = Pattern.compile("\\d{1,3}(\\.\\d{1,3}){3,5}$");
private static String ANY_HOST_VALUE = "0.0.0.0";
private static String LOCAL_HOST_VALUE = "127.0.0.1";
private static InetAddress LOCAL_ADDRESS = null;
private static volatile String HOST_ADDRESS;
public static String getHost() {
if (HOST_ADDRESS != null) {
return HOST_ADDRESS;
}
InetAddress address = getLocalAddress();
if (address != null) {
HOST_ADDRESS = address.getHostAddress();
return HOST_ADDRESS;
}
return LOCAL_HOST_VALUE;
}
private static InetAddress getLocalAddress() {
if (null != LOCAL_ADDRESS) {
return LOCAL_ADDRESS;
}
return getLocalAddress0();
}
/**
* Find first valid IP from local network card
*
* @return first valid local IP
*/
private static synchronized InetAddress getLocalAddress0() {
if (null != LOCAL_ADDRESS) {
return LOCAL_ADDRESS;
}
InetAddress localAddress = null;
NetworkInterface networkInterface = findNetworkInterface();
Enumeration<InetAddress> addresses = networkInterface.getInetAddresses();
while (addresses.hasMoreElements()) {
Optional<InetAddress> addressOp = toValidAddress(addresses.nextElement());
if (addressOp.isPresent()) {
try {
if (addressOp.get().isReachable(100)) {
LOCAL_ADDRESS = addressOp.get();
return LOCAL_ADDRESS;
}
} catch (IOException e) {
logger.warn("test address id reachable io exception", e);
}
}
}
try {
localAddress = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
logger.warn("InetAddress get LocalHost exception", e);
}
Optional<InetAddress> addressOp = toValidAddress(localAddress);
if (addressOp.isPresent()) {
LOCAL_ADDRESS = addressOp.get();
}
return LOCAL_ADDRESS;
}
private static Optional<InetAddress> toValidAddress(InetAddress address) {
if (address instanceof Inet6Address) {
Inet6Address v6Address = (Inet6Address) address;
if (isPreferIPV6Address()) {
return Optional.ofNullable(normalizeV6Address(v6Address));
}
}
if (isValidV4Address(address)) {
return Optional.of(address);
}
return Optional.empty();
}
private static InetAddress normalizeV6Address(Inet6Address address) {
String addr = address.getHostAddress();
int i = addr.lastIndexOf('%');
if (i > 0) {
try {
return InetAddress.getByName(addr.substring(0, i) + '%' + address.getScopeId());
} catch (UnknownHostException e) {
logger.debug("Unknown IPV6 address: ", e);
}
}
return address;
}
public static boolean isValidV4Address(InetAddress address) {
if (address == null || address.isLoopbackAddress()) {
return false;
}
String name = address.getHostAddress();
return (name != null
&& IP_PATTERN.matcher(name).matches()
&& !ANY_HOST_VALUE.equals(name)
&& !LOCAL_HOST_VALUE.equals(name));
}
/**
* Check if an ipv6 address
*
* @return true if it is reachable
*/
private static boolean isPreferIPV6Address() {
return Boolean.getBoolean("java.net.preferIPv6Addresses");
}
/**
* Get the suitable {@link NetworkInterface}
*
* @return If no {@link NetworkInterface} is available , return <code>null</code>
*/
private static NetworkInterface findNetworkInterface() {
List<NetworkInterface> validNetworkInterfaces = emptyList();
try {
validNetworkInterfaces = getValidNetworkInterfaces();
} catch (SocketException e) {
logger.warn("ValidNetworkInterfaces exception", e);
}
return validNetworkInterfaces.get(0);
}
/**
* Get the valid {@link NetworkInterface network interfaces}
*
* @throws SocketException SocketException if an I/O error occurs.
*/
private static List<NetworkInterface> getValidNetworkInterfaces() throws SocketException {
List<NetworkInterface> validNetworkInterfaces = new LinkedList<>();
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
while (interfaces.hasMoreElements()) {
NetworkInterface networkInterface = interfaces.nextElement();
if (ignoreNetworkInterface(networkInterface)) { // ignore
continue;
}
validNetworkInterfaces.add(networkInterface);
}
return validNetworkInterfaces;
}
/**
* @param networkInterface {@link NetworkInterface}
* @return if the specified {@link NetworkInterface} should be ignored, return <code>true</code>
* @throws SocketException SocketException if an I/O error occurs.
*/
public static boolean ignoreNetworkInterface(NetworkInterface networkInterface) throws SocketException {
return networkInterface == null
|| networkInterface.isLoopback()
|| networkInterface.isVirtual()
|| !networkInterface.isUp();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,140 | [BUG] Embed zookeeper server 'ZKServer' has deadlock problem | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
look at the code snippet
```java
private static synchronized void startLocalZkServer(final int port, final String dataDirPath,final int tickTime,String maxClientCnxns) {
if (zkServer != null) {
throw new RuntimeException("Zookeeper server is already started!");
}
zkServer = new PublicZooKeeperServerMain();
logger.info("Zookeeper data path : {} ", dataDirPath);
dataDir = dataDirPath;
final String[] args = new String[]{Integer.toString(port), dataDirPath, Integer.toString(tickTime), maxClientCnxns};
try {
logger.info("Zookeeper server started ");
isStarted.compareAndSet(false, true);
zkServer.initializeAndRun(args);
} catch (QuorumPeerConfig.ConfigException e) {
logger.warn("Caught exception while starting ZK", e);
} catch (IOException e) {
logger.warn("Caught exception while starting ZK", e);
}
}
```
The above start method acquires the lock on the ZKServer class, but the line `zkServer.initializeAndRun(args);` blocks until the application exits, so the method never returns and the lock is never released.
```java
private static synchronized void stopLocalZkServer(final boolean deleteDataDir) {
        // omitted
}
```
The above stop method tries to acquire the same lock on the ZKServer class, but it can never succeed, so the embedded server can never be stopped.
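One possible way to break the deadlock (a sketch only, not necessarily the patch that was merged): run the blocking ZooKeeper main loop on its own thread, so the synchronized start method returns and releases the `ZKServer.class` lock almost immediately.

```java
// Sketch: move the blocking initializeAndRun call onto a dedicated thread so
// the synchronized start method returns quickly and releases the class lock.
private static synchronized void startLocalZkServer(final int port, final String dataDirPath,
                                                    final int tickTime, final String maxClientCnxns) {
    if (zkServer != null) {
        throw new RuntimeException("Zookeeper server is already started!");
    }
    zkServer = new PublicZooKeeperServerMain();
    dataDir = dataDirPath;
    final String[] args = {Integer.toString(port), dataDirPath,
            Integer.toString(tickTime), maxClientCnxns};
    isStarted.compareAndSet(false, true);
    new Thread(() -> {
        try {
            zkServer.initializeAndRun(args); // blocks until shutdown() is called
        } catch (Exception e) {
            logger.warn("Caught exception while running ZK", e);
        }
    }, "zk-server-main").start();
}
```

With the start method returning promptly, `stopLocalZkServer` can acquire the class lock and call `zkServer.shutdown()` as intended.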
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Which version of Dolphin Scheduler:**
-[1.1.0-preview]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/3140 | https://github.com/apache/dolphinscheduler/pull/3141 | 6f9970b189d71d043e79200a70a95f2f33ad10f4 | 753ed58fb9f15f85240918fe6457cf9015ee5101 | "2020-07-05T15:26:02Z" | java | "2020-07-13T10:52:33Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/zk/ZKServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.zk;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.ZooKeeperServerMain;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * embedded ZooKeeper service, intended only for quick trial/experience use
 */
public class ZKServer {
private static final Logger logger = LoggerFactory.getLogger(ZKServer.class);
private static volatile PublicZooKeeperServerMain zkServer = null;
public static final int DEFAULT_ZK_TEST_PORT = 2181;
private static String dataDir = null;
private static final AtomicBoolean isStarted = new AtomicBoolean(false);
public static void main(String[] args) {
if(!isStarted()){
ZKServer.start();
/**
* register hooks, which are called before the process exits
*/
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
stop();
}
}));
}else{
logger.info("zk server aleady started");
}
}
/**
* start service
*/
public static void start() {
try {
startLocalZkServer(DEFAULT_ZK_TEST_PORT);
} catch (Exception e) {
logger.error("Failed to start ZK: " + e);
}
}
public static boolean isStarted(){
return isStarted.get();
}
static class PublicZooKeeperServerMain extends ZooKeeperServerMain {
@Override
public void initializeAndRun(String[] args)
throws QuorumPeerConfig.ConfigException, IOException {
super.initializeAndRun(args);
}
@Override
public void shutdown() {
super.shutdown();
}
}
/**
* Starts a local Zk instance with a generated empty data directory
*
* @param port The port to listen on
*/
public static void startLocalZkServer(final int port) {
String zkDataDir = System.getProperty("user.dir") +"/zookeeper_data";
logger.info("zk server starting, data dir path:{}" , zkDataDir);
startLocalZkServer(port, zkDataDir, ZooKeeperServer.DEFAULT_TICK_TIME,"60");
}
/**
* Starts a local Zk instance
*
* @param port The port to listen on
* @param dataDirPath The path for the Zk data directory
* @param tickTime zk tick time
* @param maxClientCnxns zk max client connections
*/
private static synchronized void startLocalZkServer(final int port, final String dataDirPath,final int tickTime,String maxClientCnxns) {
if (zkServer != null) {
throw new RuntimeException("Zookeeper server is already started!");
}
zkServer = new PublicZooKeeperServerMain();
logger.info("Zookeeper data path : {} ", dataDirPath);
dataDir = dataDirPath;
final String[] args = new String[]{Integer.toString(port), dataDirPath, Integer.toString(tickTime), maxClientCnxns};
try {
logger.info("Zookeeper server started ");
isStarted.compareAndSet(false, true);
zkServer.initializeAndRun(args);
} catch (QuorumPeerConfig.ConfigException e) {
logger.warn("Caught exception while starting ZK", e);
} catch (IOException e) {
logger.warn("Caught exception while starting ZK", e);
}
}
/**
* Stops a local Zk instance, deleting its data directory
*/
public static void stop() {
try {
stopLocalZkServer(true);
logger.info("zk server stopped");
} catch (Exception e) {
logger.error("Failed to stop ZK ",e);
}
}
/**
* Stops a local Zk instance.
*
* @param deleteDataDir Whether or not to delete the data directory
*/
private static synchronized void stopLocalZkServer(final boolean deleteDataDir) {
if (zkServer != null) {
try {
zkServer.shutdown();
zkServer = null;
if (deleteDataDir) {
org.apache.commons.io.FileUtils.deleteDirectory(new File(dataDir));
}
isStarted.compareAndSet(true, false);
} catch (Exception e) {
logger.warn("Caught exception while stopping ZK server", e);
throw new RuntimeException(e);
}
}
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,140 | [BUG] Embed zookeeper server 'ZKServer' has deadlock problem | *For better global communication, please give priority to using English description, thx! *
**Describe the bug**
look at the code snippet
```java
private static synchronized void startLocalZkServer(final int port, final String dataDirPath,final int tickTime,String maxClientCnxns) {
if (zkServer != null) {
throw new RuntimeException("Zookeeper server is already started!");
}
zkServer = new PublicZooKeeperServerMain();
logger.info("Zookeeper data path : {} ", dataDirPath);
dataDir = dataDirPath;
final String[] args = new String[]{Integer.toString(port), dataDirPath, Integer.toString(tickTime), maxClientCnxns};
try {
logger.info("Zookeeper server started ");
isStarted.compareAndSet(false, true);
zkServer.initializeAndRun(args);
} catch (QuorumPeerConfig.ConfigException e) {
logger.warn("Caught exception while starting ZK", e);
} catch (IOException e) {
logger.warn("Caught exception while starting ZK", e);
}
}
```
The above start method acquires the lock on the ZKServer class, but the line `zkServer.initializeAndRun(args);` blocks until the application exits, so the method never returns and the lock is never released.
```java
private static synchronized void stopLocalZkServer(final boolean deleteDataDir) {
        // omitted
}
```
The above stop method tries to acquire the same lock on the ZKServer class, but it can never succeed, so the embedded server can never be stopped.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Which version of Dolphin Scheduler:**
-[1.1.0-preview]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/3140 | https://github.com/apache/dolphinscheduler/pull/3141 | 6f9970b189d71d043e79200a70a95f2f33ad10f4 | 753ed58fb9f15f85240918fe6457cf9015ee5101 | "2020-07-05T15:26:02Z" | java | "2020-07-13T10:52:33Z" | dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/zk/ZKServerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.zk;
import org.junit.Assert;
import org.junit.Test;
// ZKServer is a process, can't unit test
public class ZKServerTest {
@Test
public void isStarted() {
Assert.assertEquals(false, ZKServer.isStarted());
}
@Test
public void stop() {
ZKServer.stop();
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,179 | [BUG] cache bug |
**Describe the bug**
![image](https://user-images.githubusercontent.com/7700151/87138506-1a18ae80-c2d1-11ea-8a75-98af77b07589.png)
When I go to use it, this is what appears:
![image](https://user-images.githubusercontent.com/7700151/87138427-fd7c7680-c2d0-11ea-971d-02800505ba9b.png)
| https://github.com/apache/dolphinscheduler/issues/3179 | https://github.com/apache/dolphinscheduler/pull/3200 | a7aa58e6ca258c817ec53d8ae27743e4904189c7 | 3aa34bc72363eab368ade91b6e5be77b67897b02 | "2020-07-10T09:17:26Z" | java | "2020-07-16T06:56:53Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/startingParam/index.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="starting-params-dag-index">
<template v-if="isView && isActive">
<div class="box">
<p class="box-hd"><em class="fa ans-icon-arrow-circle-right"></em><strong>{{$t('Startup parameter')}}</strong></p>
<ul class="box-bd">
<li><span class="tab">{{$t('Startup type')}}:</span><span class="content">{{_rtRunningType(startupParam.commandType)}}</span></li>
<li><span class="tab">{{$t('Complement range')}}:</span><span class="content" v-if="startupParam.commandParam && startupParam.commandParam.complementStartDate">{{startupParam.commandParam.complementStartDate}}-{{startupParam.commandParam.complementEndDate}}</span><span class="content" v-else>-</span></li>
<li><span class="tab">{{$t('Failure Strategy')}}:</span><span class="content">{{startupParam.failureStrategy === 'END' ? $t('End') : $t('Continue')}}</span></li>
<li><span class="tab">{{$t('Process priority')}}:</span><span class="content">{{startupParam.processInstancePriority}}</span></li>
<li><span class="tab">{{$t('Worker group')}}:</span><span class="content" v-if="workerGroupList.length">{{startupParam.workerGroup}}</span></li>
<li><span class="tab">{{$t('Notification strategy')}}:</span><span class="content">{{_rtWarningType(startupParam.warningType)}}</span></li>
<li><span class="tab">{{$t('Notification group')}}:</span><span class="content" v-if="notifyGroupList.length">{{_rtNotifyGroupName(startupParam.warningGroupId)}}</span></li>
<li><span class="tab">{{$t('Recipient')}}:</span><span class="content">{{startupParam.receivers || '-'}}</span></li>
<li><span class="tab">{{$t('Cc')}}:</span><span class="content">{{startupParam.receiversCc || '-'}}</span></li>
</ul>
</div>
</template>
</div>
</template>
<script>
  import _ from 'lodash'
  import store from '@/conf/home/store'
import { runningType } from '@/conf/home/pages/dag/_source/config'
import { warningTypeList } from '@/conf/home/pages/projects/pages/definition/pages/list/_source/util'
export default {
name: 'starting-params-dag-index',
data () {
return {
store,
startupParam: store.state.dag.startup,
isView: false,
isActive: true,
notifyGroupList: null,
workerGroupList: null
}
},
methods: {
_toggleParam () {
this.isView = !this.isView
},
_rtRunningType (code) {
return _.filter(runningType, v => v.code === code)[0].desc
},
_rtWarningType (id) {
return _.filter(warningTypeList, v => v.id === id)[0].code
},
_rtNotifyGroupName (id) {
let o = _.filter(this.notifyGroupList, v => v.id === id)
if (o && o.length) {
return o[0].code
}
return '-'
},
_rtWorkerGroupName (id) {
let o = _.filter(this.workerGroupList, v => v.id === id)
if (o && o.length) {
return o[0].name
}
return '-'
},
_getNotifyGroupList () {
let notifyGroupListS = _.cloneDeep(this.store.state.dag.notifyGroupListS) || []
if (!notifyGroupListS.length) {
this.store.dispatch('dag/getNotifyGroupList').then(res => {
this.notifyGroupList = res
})
} else {
this.notifyGroupList = notifyGroupListS
}
},
_getWorkerGroupList () {
let stateWorkerGroupsList = this.store.state.security.workerGroupsListAll || []
if (!stateWorkerGroupsList.length) {
this.store.dispatch('security/getWorkerGroupsAll').then(res => {
this.workerGroupList = res
})
} else {
this.workerGroupList = stateWorkerGroupsList
}
}
},
watch: {
'$route': {
deep: true,
handler () {
this.isActive = false
this.$nextTick(() => (this.isActive = true))
}
}
},
mounted () {
this._getNotifyGroupList()
this._getWorkerGroupList()
}
}
</script>
<style lang="scss">
.starting-params-dag-index {
.box {
padding: 5px 10px 10px;
.box-hd {
.fa {
color: #0097e0;
margin-right: 4px;
}
font-size: 16px;
}
.box-bd {
margin-left: 20px;
}
}
}
.tab {
font-size: 12px;
font-weight: bold;
}
.content {
font-size: 12px;
&:hover {
color: #47c3ff;
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,179 | [BUG] cache bug |
**Describe the bug**
![image](https://user-images.githubusercontent.com/7700151/87138506-1a18ae80-c2d1-11ea-8a75-98af77b07589.png)
When I go to use it
![image](https://user-images.githubusercontent.com/7700151/87138427-fd7c7680-c2d0-11ea-971d-02800505ba9b.png)
| https://github.com/apache/dolphinscheduler/issues/3179 | https://github.com/apache/dolphinscheduler/pull/3200 | a7aa58e6ca258c817ec53d8ae27743e4904189c7 | 3aa34bc72363eab368ade91b6e5be77b67897b02 | "2020-07-10T09:17:26Z" | java | "2020-07-16T06:56:53Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/start.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="start-process-model">
<div class="title-box">
<span>{{$t('Please set the parameters before starting')}}</span>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Process Name')}}
</div>
<div style="line-height: 32px;">{{workflowName}}</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Failure Strategy')}}
</div>
<div class="cont">
<x-radio-group v-model="failureStrategy" style="margin-top: 7px;">
<x-radio :label="'CONTINUE'">{{$t('Continue')}}</x-radio>
<x-radio :label="'END'">{{$t('End')}}</x-radio>
</x-radio-group>
</div>
</div>
<div class="clearfix list" v-if="sourceType === 'contextmenu'" style="margin-top: -8px;">
<div class="text">
{{$t('Node execution')}}
</div>
<div class="cont" style="padding-top: 6px;">
<x-radio-group v-model="taskDependType">
<x-radio :label="'TASK_POST'">{{$t('Backward execution')}}</x-radio>
<x-radio :label="'TASK_PRE'">{{$t('Forward execution')}}</x-radio>
<x-radio :label="'TASK_ONLY'">{{$t('Execute only the current node')}}</x-radio>
</x-radio-group>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Notification strategy')}}
</div>
<div class="cont">
<x-select style="width: 200px;" v-model="warningType">
<x-option
v-for="city in warningTypeList"
:key="city.id"
:value="city.id"
:label="city.code">
</x-option>
</x-select>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Process priority')}}
</div>
<div class="cont">
<m-priority v-model="processInstancePriority"></m-priority>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Worker group')}}
</div>
<div class="cont">
<m-worker-groups v-model="workerGroup"></m-worker-groups>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Notification group')}}
</div>
<div class="cont">
<x-select
style="width: 200px;"
v-model="warningGroupId"
:disabled="!notifyGroupList.length">
<x-input slot="trigger" slot-scope="{ selectedModel }" readonly :placeholder="$t('Please select a notification group')" :value="selectedModel ? selectedModel.label : ''" style="width: 200px;" @on-click-icon.stop="warningGroupId = ''">
<em slot="suffix" class="ans-icon-fail-solid" style="font-size: 15px;cursor: pointer;" v-show="warningGroupId"></em>
<em slot="suffix" class="ans-icon-arrow-down" style="font-size: 12px;" v-show="!warningGroupId"></em>
</x-input>
<x-option
v-for="city in notifyGroupList"
:key="city.id"
:value="city.id"
:label="city.code">
</x-option>
</x-select>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Recipient')}}
</div>
<div class="cont" style="width: 688px;">
<m-email v-model="receivers" :repeat-data="receiversCc"></m-email>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Cc')}}
</div>
<div class="cont" style="width: 688px;">
<m-email v-model="receiversCc" :repeat-data="receivers"></m-email>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Complement Data')}}
</div>
<div class="cont">
<div style="padding-top: 6px;">
<x-checkbox v-model="execType">{{$t('Whether it is a complement process?')}}</x-checkbox>
</div>
</div>
</div>
<template v-if="execType">
<div class="clearfix list" style="margin:-6px 0 16px 0">
<div class="text">
{{$t('Mode of execution')}}
</div>
<div class="cont">
<x-radio-group v-model="runMode" style="margin-top: 7px;">
<x-radio :label="'RUN_MODE_SERIAL'">{{$t('Serial execution')}}</x-radio>
<x-radio :label="'RUN_MODE_PARALLEL'">{{$t('Parallel execution')}}</x-radio>
</x-radio-group>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Schedule date')}}
</div>
<div class="cont">
<x-datepicker
style="width: 360px;"
:panel-num="2"
placement="bottom-start"
@on-change="_datepicker"
:value="scheduleTime"
type="daterange"
:placeholder="$t('Select date range')"
format="YYYY-MM-DD HH:mm:ss">
</x-datepicker>
</div>
</div>
</template>
<div class="submit">
<x-button type="text" @click="close()"> {{$t('Cancel')}} </x-button>
<x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()">{{spinnerLoading ? 'Loading...' : $t('Start')}} </x-button>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import dayjs from 'dayjs'
import mEmail from './email.vue'
import store from '@/conf/home/store'
import { warningTypeList } from './util'
import mPriority from '@/module/components/priority/priority'
import mWorkerGroups from '@/conf/home/pages/dag/_source/formModel/_source/workerGroups'
export default {
name: 'start-process',
data () {
return {
store,
processDefinitionId: 0,
failureStrategy: 'CONTINUE',
warningTypeList: warningTypeList,
workflowName: '',
warningType: '',
notifyGroupList: [],
warningGroupId: '',
scheduleTime: '',
spinnerLoading: false,
execType: false,
taskDependType: 'TASK_POST',
receivers: [],
receiversCc: [],
runMode: 'RUN_MODE_SERIAL',
processInstancePriority: 'MEDIUM',
workerGroup: 'default'
}
},
props: {
item: Object,
startNodeList: {
type: String,
default: ''
},
sourceType: String
},
methods: {
_datepicker (val) {
this.scheduleTime = val
},
_start () {
this.spinnerLoading = true
let param = {
processDefinitionId: this.item.id,
scheduleTime: this.scheduleTime.length && this.scheduleTime.join(',') || '',
failureStrategy: this.failureStrategy,
warningType: this.warningType,
warningGroupId: this.warningGroupId=='' ? 0 : this.warningGroupId,
execType: this.execType ? 'COMPLEMENT_DATA' : null,
startNodeList: this.startNodeList,
taskDependType: this.taskDependType,
runMode: this.runMode,
processInstancePriority: this.processInstancePriority,
receivers: this.receivers.join(',') || '',
receiversCc: this.receiversCc.join(',') || '',
workerGroup: this.workerGroup
}
// Executed from the specified node
if (this.sourceType === 'contextmenu') {
param.taskDependType = this.taskDependType
}
this.store.dispatch('dag/processStart', param).then(res => {
this.$message.success(res.msg)
this.$emit('onUpdate')
setTimeout(() => {
this.spinnerLoading = false
this.close()
}, 500)
}).catch(e => {
this.$message.error(e.msg || '')
this.spinnerLoading = false
})
},
_getNotifyGroupList () {
return new Promise((resolve, reject) => {
let notifyGroupListS = _.cloneDeep(this.store.state.dag.notifyGroupListS) || []
if (!notifyGroupListS.length) {
this.store.dispatch('dag/getNotifyGroupList').then(res => {
this.notifyGroupList = res
resolve()
})
} else {
this.notifyGroupList = notifyGroupListS
resolve()
}
})
},
_getReceiver () {
this.store.dispatch('dag/getReceiver', { processDefinitionId: this.item.id }).then(res => {
this.receivers = res.receivers && res.receivers.split(',') || []
this.receiversCc = res.receiversCc && res.receiversCc.split(',') || []
})
},
ok () {
this._start()
},
close () {
this.$emit('close')
}
},
watch: {
execType (a) {
this.scheduleTime = a ? [dayjs().format('YYYY-MM-DD 00:00:00'), dayjs().format('YYYY-MM-DD 00:00:00')] : ''
}
},
created () {
this.warningType = this.warningTypeList[0].id
this.workflowName = this.item.name
this._getReceiver()
let stateWorkerGroupsList = this.store.state.security.workerGroupsListAll || []
if (stateWorkerGroupsList.length) {
this.workerGroup = stateWorkerGroupsList[0].id
} else {
this.store.dispatch('security/getWorkerGroupsAll').then(res => {
this.$nextTick(() => {
if(res.length>0) {
this.workerGroup = res[0].id
}
})
})
}
},
mounted () {
this._getNotifyGroupList().then(() => {
this.$nextTick(() => {
this.warningGroupId = ''
})
})
this.workflowName = this.item.name
},
computed: {},
components: { mEmail, mPriority, mWorkerGroups }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.start-process-model {
width: 860px;
min-height: 300px;
background: #fff;
border-radius: 3px;
.title-box {
margin-bottom: 18px;
span {
padding-left: 30px;
font-size: 16px;
padding-top: 29px;
display: block;
}
}
.list {
margin-bottom: 14px;
.text {
width: 140px;
float: left;
text-align: right;
line-height: 32px;
padding-right: 8px;
}
.cont {
width: 350px;
float: left;
.add-email-model {
padding: 20px;
}
}
}
.submit {
text-align: right;
padding-right: 30px;
padding-top: 10px;
padding-bottom: 30px;
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,179 | [BUG] cache bug |
**Describe the bug**
![image](https://user-images.githubusercontent.com/7700151/87138506-1a18ae80-c2d1-11ea-8a75-98af77b07589.png)
When I go to use it
![image](https://user-images.githubusercontent.com/7700151/87138427-fd7c7680-c2d0-11ea-971d-02800505ba9b.png)
| https://github.com/apache/dolphinscheduler/issues/3179 | https://github.com/apache/dolphinscheduler/pull/3200 | a7aa58e6ca258c817ec53d8ae27743e4904189c7 | 3aa34bc72363eab368ade91b6e5be77b67897b02 | "2020-07-10T09:17:26Z" | java | "2020-07-16T06:56:53Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/timing.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="timing-process-model">
<div class="title-box">
<span>{{$t('Set parameters before timing')}}</span>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Start and stop time')}}
</div>
<div class="cont">
<x-datepicker
style="width: 360px;"
:panel-num="2"
placement="bottom-start"
@on-change="_datepicker"
:value="scheduleTime"
type="daterange"
:placeholder="$t('Select date range')"
format="YYYY-MM-DD HH:mm:ss">
</x-datepicker>
</div>
</div>
<div class="clearfix list">
<x-button type="info" style="margin-left:20px" shape="circle" :loading="spinnerLoading" @click="preview()">{{$t('Execute time')}}</x-button>
<div class="text">
{{$t('Timing')}}
</div>
<div class="cont">
<template>
<x-poptip :ref="'poptip'" placement="bottom-start">
<div class="crontab-box">
<v-crontab v-model="crontab" :locale="i18n"></v-crontab>
</div>
<template slot="reference">
<x-input
style="width: 360px;"
type="text"
readonly
:value="crontab"
autocomplete="off">
</x-input>
</template>
</x-poptip>
</template>
</div>
</div>
<div class="clearfix list">
<div style = "padding-left: 150px;">{{$t('Next five execution times')}}</div>
<ul style = "padding-left: 150px;">
<li v-for="(time,i) in previewTimes" :key='i'>{{time}}</li>
</ul>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Failure Strategy')}}
</div>
<div class="cont">
<x-radio-group v-model="failureStrategy" style="margin-top: 7px;">
<x-radio :label="'CONTINUE'">{{$t('Continue')}}</x-radio>
<x-radio :label="'END'">{{$t('End')}}</x-radio>
</x-radio-group>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Notification strategy')}}
</div>
<div class="cont">
<x-select
style="width: 200px;"
v-model="warningType">
<x-option
v-for="city in warningTypeList"
:key="city.id"
:value="city.id"
:label="city.code">
</x-option>
</x-select>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Process priority')}}
</div>
<div class="cont">
<m-priority v-model="processInstancePriority"></m-priority>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Worker group')}}
</div>
<div class="cont">
<m-worker-groups v-model="workerGroup"></m-worker-groups>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Notification group')}}
</div>
<div class="cont">
<x-select
style="width: 200px;"
:disabled="!notifyGroupList.length"
v-model="warningGroupId">
<x-input slot="trigger" readonly slot-scope="{ selectedModel }" :placeholder="$t('Please select a notification group')" :value="selectedModel ? selectedModel.label : ''" style="width: 200px;" @on-click-icon.stop="warningGroupId = {}">
<em slot="suffix" class="ans-icon-fail-solid" style="font-size: 15px;cursor: pointer;" v-show="warningGroupId.id"></em>
<em slot="suffix" class="ans-icon-arrow-down" style="font-size: 12px;" v-show="!warningGroupId.id"></em>
</x-input>
<x-option
v-for="city in notifyGroupList"
:key="city.id"
:value="city.id"
:label="city.code">
</x-option>
</x-select>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Recipient')}}
</div>
<div class="cont" style="width: 680px;">
<m-email v-model="receivers" :repeat-data="receiversCc"></m-email>
</div>
</div>
<div class="clearfix list">
<div class="text">
{{$t('Cc')}}
</div>
<div class="cont" style="width: 680px;">
<m-email v-model="receiversCc" :repeat-data="receivers"></m-email>
</div>
</div>
<div class="submit">
<x-button type="text" @click="close()"> {{$t('Cancel')}} </x-button>
<x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()">{{spinnerLoading ? 'Loading...' : (item.crontab ? $t('Edit') : $t('Create'))}} </x-button>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import mEmail from './email.vue'
import store from '@/conf/home/store'
import { warningTypeList } from './util'
import { vCrontab } from '@/module/components/crontab/index'
import { formatDate } from '@/module/filter/filter'
import mPriority from '@/module/components/priority/priority'
import mWorkerGroups from '@/conf/home/pages/dag/_source/formModel/_source/workerGroups'
export default {
name: 'timing-process',
data () {
return {
store,
processDefinitionId: 0,
failureStrategy: 'CONTINUE',
warningTypeList: warningTypeList,
warningType: 'NONE',
notifyGroupList: [],
warningGroupId: '',
spinnerLoading: false,
scheduleTime: '',
crontab: '0 0 * * * ? *',
cronPopover: false,
receivers: [],
receiversCc: [],
i18n: i18n.globalScope.LOCALE,
processInstancePriority: 'MEDIUM',
workerGroup: '',
previewTimes: []
}
},
props: {
item: Object,
receiversD: Array,
receiversCcD: Array,
type: String
},
methods: {
_datepicker (val) {
this.scheduleTime = val
},
_verification () {
if (!this.scheduleTime) {
this.$message.warning(`${i18n.$t('Please select time')}`)
return false
}
if (this.scheduleTime[0] === this.scheduleTime[1]) {
this.$message.warning(`${i18n.$t('The start time must not be the same as the end')}`)
return false
}
if (!this.crontab) {
this.$message.warning(`${i18n.$t('Please enter crontab')}`)
return false
}
return true
},
_timing () {
if (this._verification()) {
let api = ''
let searchParams = {
schedule: JSON.stringify({
startTime: this.scheduleTime[0],
endTime: this.scheduleTime[1],
crontab: this.crontab
}),
failureStrategy: this.failureStrategy,
warningType: this.warningType,
processInstancePriority: this.processInstancePriority,
warningGroupId: this.warningGroupId =='' ? 0 : this.warningGroupId,
receivers: this.receivers.join(',') || '',
receiversCc: this.receiversCc.join(',') || '',
workerGroup: this.workerGroup
}
let msg = ''
// edit
if (this.item.crontab) {
api = 'dag/updateSchedule'
searchParams.id = this.item.id
msg = `${i18n.$t('Edit')}${i18n.$t('success')},${i18n.$t('Please go online')}`
} else {
api = 'dag/createSchedule'
searchParams.processDefinitionId = this.item.id
msg = `${i18n.$t('Create')}${i18n.$t('success')}`
}
this.store.dispatch(api, searchParams).then(res => {
this.$message.success(msg)
this.$emit('onUpdate')
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
_preview () {
if (this._verification()) {
let api = 'dag/previewSchedule'
let searchParams = {
schedule: JSON.stringify({
startTime: this.scheduleTime[0],
endTime: this.scheduleTime[1],
crontab: this.crontab
})
}
let msg = ''
this.store.dispatch(api, searchParams).then(res => {
if (res.length) {
this.previewTimes = res
} else {
this.$message.warning(`${i18n.$t('There is no data for this period of time')}`)
}
})
}
},
_getNotifyGroupList () {
return new Promise((resolve, reject) => {
let notifyGroupListS = _.cloneDeep(this.store.state.dag.notifyGroupListS) || []
if (!notifyGroupListS.length) {
this.store.dispatch('dag/getNotifyGroupList').then(res => {
this.notifyGroupList = res
if (this.notifyGroupList.length) {
resolve()
} else {
reject(new Error(0))
}
})
} else {
this.notifyGroupList = notifyGroupListS
resolve()
}
})
},
ok () {
this._timing()
},
close () {
this.$emit('close')
},
preview () {
this._preview()
}
},
watch: {
},
created () {
if(this.item.workerGroup===undefined) {
let stateWorkerGroupsList = this.store.state.security.workerGroupsListAll || []
if (stateWorkerGroupsList.length) {
this.workerGroup = stateWorkerGroupsList[0].id
} else {
this.store.dispatch('security/getWorkerGroupsAll').then(res => {
this.$nextTick(() => {
this.workerGroup = res[0].id
})
})
}
} else {
this.workerGroup = this.item.workerGroup
}
if(this.item.crontab !== null){
this.crontab = this.item.crontab
}
if(this.type == 'timing') {
let date = new Date()
let year = date.getFullYear()
let month = date.getMonth() + 1
let day = date.getDate()
if (month < 10) {
month = "0" + month;
}
if (day < 10) {
day = "0" + day;
}
let startDate = year + "-" + month + "-" + day + ' ' + '00:00:00'
let endDate = (year+100) + "-" + month + "-" + day + ' ' + '00:00:00'
let times = []
times[0] = startDate
times[1] = endDate
this.crontab = '0 0 * * * ? *'
this.scheduleTime = times
}
this.receivers = _.cloneDeep(this.receiversD)
this.receiversCc = _.cloneDeep(this.receiversCcD)
},
mounted () {
let item = this.item
// Determine whether to echo
if (this.item.crontab) {
this.crontab = item.crontab
this.scheduleTime = [formatDate(item.startTime), formatDate(item.endTime)]
this.failureStrategy = item.failureStrategy
this.warningType = item.warningType
this.processInstancePriority = item.processInstancePriority
this._getNotifyGroupList().then(() => {
this.$nextTick(() => {
// let list = _.filter(this.notifyGroupList, v => v.id === item.warningGroupId)
this.warningGroupId = item.warningGroupId
})
}).catch(() => this.warningGroupId = '')
} else {
this._getNotifyGroupList().then(() => {
this.$nextTick(() => {
this.warningGroupId = ''
})
}).catch(() => this.warningGroupId = '')
}
},
components: { vCrontab, mEmail, mPriority, mWorkerGroups }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.timing-process-model {
width: 860px;
min-height: 300px;
background: #fff;
border-radius: 3px;
margin-top: 0;
.crontab-box {
margin: -6px;
.v-crontab {
}
}
.from-model {
padding-top: 0;
}
.title-box {
margin-bottom: 18px;
span {
padding-left: 30px;
font-size: 16px;
padding-top: 29px;
display: block;
}
}
.list {
margin-bottom: 14px;
>.text {
width: 140px;
float: left;
text-align: right;
line-height: 32px;
padding-right: 8px;
}
.cont {
width: 350px;
float: left;
}
}
.submit {
text-align: right;
padding-right: 30px;
padding-top: 10px;
padding-bottom: 30px;
}
}
.v-crontab-from-model {
.list-box {
padding: 0;
}
}
.x-date-packer-panel .x-date-packer-day .lattice label.bg-hover {
background: #00BFFF!important;
margin-top: -4px;
}
.x-date-packer-panel .x-date-packer-day .lattice em:hover {
background: #0098e1!important;
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,240 | [Bug][worker] Cannot find a (Map) Key deserializer for type | When I execute a SQL query task, a JSON parsing error occurs.
error info:
```
com.fasterxml.jackson.databind.exc.InvalidDefinitionException:
Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
```
![image](https://user-images.githubusercontent.com/39816903/87851699-ecb1ad80-c92d-11ea-8c3d-820f38f5d68d.png)
show code:
```java
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
```
![image](https://user-images.githubusercontent.com/39816903/87851753-69448c00-c92e-11ea-94e0-36b45135e533.png)
contextJson:
```
{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx@163.com","receiversCc":"","showType":"TABLE","localParams":[],"connParams":"","preStatements":[],"postStatements":[]},"preTasks":[],"extras":null,"depList":[],"dependence":{},"conditionResult":{"successNode":[""],"failedNode":[""]},"taskInstancePriority":"MEDIUM","workerGroup":"default","workerGroupId":null,"timeout":{"strategy":"","interval":null,"enable":false},"conditionsTask":false,"forbidden":false,"taskTimeoutParameter":{"enable":false,"strategy":null,"interval":0}}",
"processId":0,
"appIds":null,
"processInstanceId":2,
"scheduleTime":null,
"globalParams":null,
"executorId":2,
"cmdTypeIfComplement":0,
"tenantCode":"sysadmin",
"queue":"default",
"processDefineId":17,
"projectId":2,
"taskParams":null,
"envFile":null,
"definedParams":null,
"taskAppId":null,
"taskTimeoutStrategy":0,
"taskTimeout":0,
"workerGroup":"default",
"resources":{
},
"sqlTaskExecutionContext":{
"warningGroupId":0,
"connectionParams":"{"type":null,"address":"jdbc:mysql://127.0.0.1:3306","database":"test","jdbcUrl":"jdbc:mysql://127.0.0.1:3306/test","user":"root","password":"IUAjJCVeJipyb290"}",
"udfFuncTenantCodeMap":null
},
"dataxTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"dependenceTaskExecutionContext":null,
"sqoopTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"procedureTaskExecutionContext":{
"connectionParams":null
}
}
```
unit test:
```
// Note: the import paths below are an assumption based on the 1.3.x module layout.
import static org.junit.Assert.assertTrue;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.junit.Test;

public class TaskExecuteProcessorTest {
@Test
public void testJson(){
String contextJson = "{\n" +
" \"taskInstanceId\":2,\n" +
" \"taskName\":\"SQL-QUERY\",\n" +
" \"startTime\":\"2020-07-18 19:18:58\",\n" +
" \"taskType\":\"SQL\",\n" +
" \"host\":null,\n" +
" \"executePath\":\"/tmp/dolphinscheduler/exec/process/2/17/2/2\",\n" +
" \"logPath\":null,\n" +
" \"taskJson\":\"{\"id\":\"tasks-5736\",\"name\":\"SQL-QUERY\",\"desc\":null,\"type\":\"SQL\",\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"params\":{\"type\":\"MYSQL\",\"datasource\":3,\"sql\":\"SELECT * FROM person\",\"udfs\":\"\",\"sqlType\":\"0\",\"title\":\"SQL-QUERY\",\"receivers\":\"zhangboyi_mx@163.com\",\"receiversCc\":\"\",\"showType\":\"TABLE\",\"localParams\":[],\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"conditionsTask\":false,\"forbidden\":false,\"taskTimeoutParameter\":{\"enable\":false,\"strategy\":null,\"interval\":0}}\",\n" +
" \"processId\":0,\n" +
" \"appIds\":null,\n" +
" \"processInstanceId\":2,\n" +
" \"scheduleTime\":null,\n" +
" \"globalParams\":null,\n" +
" \"executorId\":2,\n" +
" \"cmdTypeIfComplement\":0,\n" +
" \"tenantCode\":\"sysadmin\",\n" +
" \"queue\":\"default\",\n" +
" \"processDefineId\":17,\n" +
" \"projectId\":2,\n" +
" \"taskParams\":null,\n" +
" \"envFile\":null,\n" +
" \"definedParams\":null,\n" +
" \"taskAppId\":null,\n" +
" \"taskTimeoutStrategy\":0,\n" +
" \"taskTimeout\":0,\n" +
" \"workerGroup\":\"default\",\n" +
" \"resources\":{\n" +
"\n" +
" },\n" +
" \"sqlTaskExecutionContext\":{\n" +
" \"warningGroupId\":0,\n" +
" \"connectionParams\":\"{\"type\":null,\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\",\"user\":\"root\",\"password\":\"IUAjJCVeJipyb290\"}\",\n" +
" \"udfFuncTenantCodeMap\":null\n" +
" },\n" +
" \"dataxTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"dependenceTaskExecutionContext\":null,\n" +
" \"sqoopTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"procedureTaskExecutionContext\":{\n" +
" \"connectionParams\":null\n" +
" }\n" +
"}" ;
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
assertTrue(taskExecutionContext != null);
}
}
```
error info:
```
Connected to the target VM, address: '127.0.0.1:53429', transport: 'socket'
19:43:31.405 [main] ERROR org.apache.dolphinscheduler.common.utils.JSONUtils - parse object exception!
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
at [Source: (String)"{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx"[truncated 1787 chars]; line: 1, column: 1]
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.reportBadDefinition(DeserializationContext.java:1452)
at com.fasterxml.jackson.databind.deser.DeserializerCache._handleUnknownKeyDeserializer(DeserializerCache.java:599)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findKeyDeserializer(DeserializerCache.java:168)
at com.fasterxml.jackson.databind.DeserializationContext.findKeyDeserializer(DeserializationContext.java:500)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.createContextual(MapDeserializer.java:248)
at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:484)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findNonContextualValueDeserializer(DeserializationContext.java:467)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:473)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477)
at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4190)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4009)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3004)
at org.apache.dolphinscheduler.common.utils.JSONUtils.parseObject(JSONUtils.java:108)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:71)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
Disconnected from the target VM, address: '127.0.0.1:53429', transport: 'socket'
java.lang.AssertionError
at org.junit.Assert.fail(Assert.java:86)
at org.junit.Assert.assertTrue(Assert.java:41)
at org.junit.Assert.assertTrue(Assert.java:52)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:73)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
```
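A common way to resolve this class of Jackson failure (a sketch, assuming the offending field is a `Map<UdfFunc, String>` whose keys were serialized as the UdfFunc's JSON text; this is not necessarily the fix that was merged) is to register a `KeyDeserializer` that turns the string key back into a `UdfFunc`:

```java
import java.io.IOException;

import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.KeyDeserializer;
import com.fasterxml.jackson.databind.ObjectMapper;

// Sketch: parse the stringified map key back into a UdfFunc instance.
public class UdfFuncKeyDeserializer extends KeyDeserializer {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    @Override
    public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException {
        // Map keys arrive as plain strings; parse the JSON text back into a UdfFunc.
        return MAPPER.readValue(key, UdfFunc.class);
    }
}
```

The deserializer can then be attached to the map field with `@JsonDeserialize(keyUsing = UdfFuncKeyDeserializer.class)` from `com.fasterxml.jackson.databind.annotation`.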
| https://github.com/apache/dolphinscheduler/issues/3240 | https://github.com/apache/dolphinscheduler/pull/3245 | 252abbdaed4a120b0bc5c23da3e956e3d429a7a1 | 6f2667bf1aab2715cfd734a5a694d2b2fd0adcc9 | "2020-07-18T11:39:43Z" | java | "2020-07-21T10:54:07Z" | dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/UdfFunc.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import org.apache.dolphinscheduler.common.enums.UdfType;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.util.Date;
/**
* udf function
*/
@TableName("t_ds_udfs")
public class UdfFunc {
/**
* id
*/
@TableId(value="id", type=IdType.AUTO)
private int id;
/**
* user id
*/
private int userId;
/**
* udf function name
*/
private String funcName;
/**
* udf class name
*/
private String className;
/**
* udf argument types
*/
private String argTypes;
/**
* udf database
*/
private String database;
/**
* udf description
*/
private String description;
/**
* resource id
*/
private int resourceId;
/**
* resource name
*/
private String resourceName;
/**
* udf function type: hive / spark
*/
private UdfType type;
/**
* create time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
private Date createTime;
/**
* update time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
private Date updateTime;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
public String getFuncName() {
return funcName;
}
public void setFuncName(String funcName) {
this.funcName = funcName;
}
public String getClassName() {
return className;
}
public void setClassName(String className) {
this.className = className;
}
public String getArgTypes() {
return argTypes;
}
public void setArgTypes(String argTypes) {
this.argTypes = argTypes;
}
public String getDatabase() {
return database;
}
public void setDatabase(String database) {
this.database = database;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public int getResourceId() {
return resourceId;
}
public void setResourceId(int resourceId) {
this.resourceId = resourceId;
}
public String getResourceName() {
return resourceName;
}
public void setResourceName(String resourceName) {
this.resourceName = resourceName;
}
public UdfType getType() {
return type;
}
public void setType(UdfType type) {
this.type = type;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Date updateTime) {
this.updateTime = updateTime;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UdfFunc udfFunc = (UdfFunc) o;
if (id != udfFunc.id) {
return false;
}
return funcName != null ? funcName.equals(udfFunc.funcName) : udfFunc.funcName == null;
}
@Override
public int hashCode() {
int result = id;
result = 31 * result + (funcName != null ? funcName.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "UdfFunc{" +
"id=" + id +
", userId=" + userId +
", funcName='" + funcName + '\'' +
", className='" + className + '\'' +
", argTypes='" + argTypes + '\'' +
", database='" + database + '\'' +
", description='" + description + '\'' +
", resourceId=" + resourceId +
", resourceName='" + resourceName + '\'' +
", type=" + type +
", createTime=" + createTime +
", updateTime=" + updateTime +
'}';
}
}
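/*
 * Hedged sketch: when UdfFunc is used as a Map key, Jackson needs a
 * registered KeyDeserializer, otherwise parsing fails with "Cannot find a
 * (Map) Key deserializer" as reported above. One option, with
 * UdfFuncKeyDeserializer being the illustrative class sketched earlier rather
 * than necessarily the shipped fix, is to register it on the shared
 * ObjectMapper:
 *
 *   SimpleModule module = new SimpleModule();
 *   module.addKeyDeserializer(UdfFunc.class, new UdfFuncKeyDeserializer());
 *   objectMapper.registerModule(module);
 */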
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,240 | [Bug][worker] Cannot find a (Map) Key deserializer for type | When I execute a SQL query task, JSON parsing fails with the error below
error info:
```
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: 
Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
```
![image](https://user-images.githubusercontent.com/39816903/87851699-ecb1ad80-c92d-11ea-8c3d-820f38f5d68d.png)
show code:
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
![image](https://user-images.githubusercontent.com/39816903/87851753-69448c00-c92e-11ea-94e0-36b45135e533.png)
contextJson:
```
{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx@163.com","receiversCc":"","showType":"TABLE","localParams":[],"connParams":"","preStatements":[],"postStatements":[]},"preTasks":[],"extras":null,"depList":[],"dependence":{},"conditionResult":{"successNode":[""],"failedNode":[""]},"taskInstancePriority":"MEDIUM","workerGroup":"default","workerGroupId":null,"timeout":{"strategy":"","interval":null,"enable":false},"conditionsTask":false,"forbidden":false,"taskTimeoutParameter":{"enable":false,"strategy":null,"interval":0}}",
"processId":0,
"appIds":null,
"processInstanceId":2,
"scheduleTime":null,
"globalParams":null,
"executorId":2,
"cmdTypeIfComplement":0,
"tenantCode":"sysadmin",
"queue":"default",
"processDefineId":17,
"projectId":2,
"taskParams":null,
"envFile":null,
"definedParams":null,
"taskAppId":null,
"taskTimeoutStrategy":0,
"taskTimeout":0,
"workerGroup":"default",
"resources":{
},
"sqlTaskExecutionContext":{
"warningGroupId":0,
"connectionParams":"{"type":null,"address":"jdbc:mysql://127.0.0.1:3306","database":"test","jdbcUrl":"jdbc:mysql://127.0.0.1:3306/test","user":"root","password":"IUAjJCVeJipyb290"}",
"udfFuncTenantCodeMap":null
},
"dataxTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"dependenceTaskExecutionContext":null,
"sqoopTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"procedureTaskExecutionContext":{
"connectionParams":null
}
}
```
test unit:
```
package org.apache.dolphinscheduler.server.worker.processor;

import static org.junit.Assert.assertTrue;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.junit.Test;

public class TaskExecuteProcessorTest {
@Test
public void testJson(){
String contextJson = "{\n" +
" \"taskInstanceId\":2,\n" +
" \"taskName\":\"SQL-QUERY\",\n" +
" \"startTime\":\"2020-07-18 19:18:58\",\n" +
" \"taskType\":\"SQL\",\n" +
" \"host\":null,\n" +
" \"executePath\":\"/tmp/dolphinscheduler/exec/process/2/17/2/2\",\n" +
" \"logPath\":null,\n" +
" \"taskJson\":\"{\"id\":\"tasks-5736\",\"name\":\"SQL-QUERY\",\"desc\":null,\"type\":\"SQL\",\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"params\":{\"type\":\"MYSQL\",\"datasource\":3,\"sql\":\"SELECT * FROM person\",\"udfs\":\"\",\"sqlType\":\"0\",\"title\":\"SQL-QUERY\",\"receivers\":\"zhangboyi_mx@163.com\",\"receiversCc\":\"\",\"showType\":\"TABLE\",\"localParams\":[],\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"conditionsTask\":false,\"forbidden\":false,\"taskTimeoutParameter\":{\"enable\":false,\"strategy\":null,\"interval\":0}}\",\n" +
" \"processId\":0,\n" +
" \"appIds\":null,\n" +
" \"processInstanceId\":2,\n" +
" \"scheduleTime\":null,\n" +
" \"globalParams\":null,\n" +
" \"executorId\":2,\n" +
" \"cmdTypeIfComplement\":0,\n" +
" \"tenantCode\":\"sysadmin\",\n" +
" \"queue\":\"default\",\n" +
" \"processDefineId\":17,\n" +
" \"projectId\":2,\n" +
" \"taskParams\":null,\n" +
" \"envFile\":null,\n" +
" \"definedParams\":null,\n" +
" \"taskAppId\":null,\n" +
" \"taskTimeoutStrategy\":0,\n" +
" \"taskTimeout\":0,\n" +
" \"workerGroup\":\"default\",\n" +
" \"resources\":{\n" +
"\n" +
" },\n" +
" \"sqlTaskExecutionContext\":{\n" +
" \"warningGroupId\":0,\n" +
" \"connectionParams\":\"{\"type\":null,\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\",\"user\":\"root\",\"password\":\"IUAjJCVeJipyb290\"}\",\n" +
" \"udfFuncTenantCodeMap\":null\n" +
" },\n" +
" \"dataxTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"dependenceTaskExecutionContext\":null,\n" +
" \"sqoopTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"procedureTaskExecutionContext\":{\n" +
" \"connectionParams\":null\n" +
" }\n" +
"}" ;
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
assertTrue(taskExecutionContext != null);
}
}
```
error info:
```
Connected to the target VM, address: '127.0.0.1:53429', transport: 'socket'
19:43:31.405 [main] ERROR org.apache.dolphinscheduler.common.utils.JSONUtils - parse object exception!
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
at [Source: (String)"{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx"[truncated 1787 chars]; line: 1, column: 1]
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.reportBadDefinition(DeserializationContext.java:1452)
at com.fasterxml.jackson.databind.deser.DeserializerCache._handleUnknownKeyDeserializer(DeserializerCache.java:599)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findKeyDeserializer(DeserializerCache.java:168)
at com.fasterxml.jackson.databind.DeserializationContext.findKeyDeserializer(DeserializationContext.java:500)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.createContextual(MapDeserializer.java:248)
at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:484)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findNonContextualValueDeserializer(DeserializationContext.java:467)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:473)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477)
at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4190)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4009)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3004)
at org.apache.dolphinscheduler.common.utils.JSONUtils.parseObject(JSONUtils.java:108)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:71)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
Disconnected from the target VM, address: '127.0.0.1:53429', transport: 'socket'
java.lang.AssertionError
at org.junit.Assert.fail(Assert.java:86)
at org.junit.Assert.assertTrue(Assert.java:41)
at org.junit.Assert.assertTrue(Assert.java:52)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:73)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
```
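Since `UdfFunc` instances serve as `Map` keys here, their `equals`/`hashCode` contract matters independently of the Jackson fix. A small test sketch exercising that contract against the `UdfFunc` class shown earlier (the test class name is illustrative):

```java
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.junit.Test;

public class UdfFuncEqualityTest {

    @Test
    public void equalInstancesShareHashCode() {
        UdfFunc a = new UdfFunc();
        a.setId(1);
        a.setFuncName("test_func");

        UdfFunc b = new UdfFunc();
        b.setId(1);
        b.setFuncName("test_func");

        // equals/hashCode are defined on id and funcName only.
        assertEquals(a, b);
        assertEquals(a.hashCode(), b.hashCode());

        b.setFuncName("other_func");
        assertNotEquals(a, b);
    }
}
```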
| https://github.com/apache/dolphinscheduler/issues/3240 | https://github.com/apache/dolphinscheduler/pull/3245 | 252abbdaed4a120b0bc5c23da3e956e3d429a7a1 | 6f2667bf1aab2715cfd734a5a694d2b2fd0adcc9 | "2020-07-18T11:39:43Z" | java | "2020-07-21T10:54:07Z" | dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/UdfFuncTest.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,240 | [Bug][worker] Cannot find a (Map) Key deserializer for type | When I execute a SQL query task, JSON parsing fails with the error below
error info:
```
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: 
Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
```
![image](https://user-images.githubusercontent.com/39816903/87851699-ecb1ad80-c92d-11ea-8c3d-820f38f5d68d.png)
show code:
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
![image](https://user-images.githubusercontent.com/39816903/87851753-69448c00-c92e-11ea-94e0-36b45135e533.png)
contextJson:
```
{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx@163.com","receiversCc":"","showType":"TABLE","localParams":[],"connParams":"","preStatements":[],"postStatements":[]},"preTasks":[],"extras":null,"depList":[],"dependence":{},"conditionResult":{"successNode":[""],"failedNode":[""]},"taskInstancePriority":"MEDIUM","workerGroup":"default","workerGroupId":null,"timeout":{"strategy":"","interval":null,"enable":false},"conditionsTask":false,"forbidden":false,"taskTimeoutParameter":{"enable":false,"strategy":null,"interval":0}}",
"processId":0,
"appIds":null,
"processInstanceId":2,
"scheduleTime":null,
"globalParams":null,
"executorId":2,
"cmdTypeIfComplement":0,
"tenantCode":"sysadmin",
"queue":"default",
"processDefineId":17,
"projectId":2,
"taskParams":null,
"envFile":null,
"definedParams":null,
"taskAppId":null,
"taskTimeoutStrategy":0,
"taskTimeout":0,
"workerGroup":"default",
"resources":{
},
"sqlTaskExecutionContext":{
"warningGroupId":0,
"connectionParams":"{"type":null,"address":"jdbc:mysql://127.0.0.1:3306","database":"test","jdbcUrl":"jdbc:mysql://127.0.0.1:3306/test","user":"root","password":"IUAjJCVeJipyb290"}",
"udfFuncTenantCodeMap":null
},
"dataxTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"dependenceTaskExecutionContext":null,
"sqoopTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"procedureTaskExecutionContext":{
"connectionParams":null
}
}
```
test unit:
```
package org.apache.dolphinscheduler.server.worker.processor;

import static org.junit.Assert.assertTrue;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.junit.Test;

public class TaskExecuteProcessorTest {
@Test
public void testJson(){
String contextJson = "{\n" +
" \"taskInstanceId\":2,\n" +
" \"taskName\":\"SQL-QUERY\",\n" +
" \"startTime\":\"2020-07-18 19:18:58\",\n" +
" \"taskType\":\"SQL\",\n" +
" \"host\":null,\n" +
" \"executePath\":\"/tmp/dolphinscheduler/exec/process/2/17/2/2\",\n" +
" \"logPath\":null,\n" +
" \"taskJson\":\"{\"id\":\"tasks-5736\",\"name\":\"SQL-QUERY\",\"desc\":null,\"type\":\"SQL\",\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"params\":{\"type\":\"MYSQL\",\"datasource\":3,\"sql\":\"SELECT * FROM person\",\"udfs\":\"\",\"sqlType\":\"0\",\"title\":\"SQL-QUERY\",\"receivers\":\"zhangboyi_mx@163.com\",\"receiversCc\":\"\",\"showType\":\"TABLE\",\"localParams\":[],\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"conditionsTask\":false,\"forbidden\":false,\"taskTimeoutParameter\":{\"enable\":false,\"strategy\":null,\"interval\":0}}\",\n" +
" \"processId\":0,\n" +
" \"appIds\":null,\n" +
" \"processInstanceId\":2,\n" +
" \"scheduleTime\":null,\n" +
" \"globalParams\":null,\n" +
" \"executorId\":2,\n" +
" \"cmdTypeIfComplement\":0,\n" +
" \"tenantCode\":\"sysadmin\",\n" +
" \"queue\":\"default\",\n" +
" \"processDefineId\":17,\n" +
" \"projectId\":2,\n" +
" \"taskParams\":null,\n" +
" \"envFile\":null,\n" +
" \"definedParams\":null,\n" +
" \"taskAppId\":null,\n" +
" \"taskTimeoutStrategy\":0,\n" +
" \"taskTimeout\":0,\n" +
" \"workerGroup\":\"default\",\n" +
" \"resources\":{\n" +
"\n" +
" },\n" +
" \"sqlTaskExecutionContext\":{\n" +
" \"warningGroupId\":0,\n" +
" \"connectionParams\":\"{\"type\":null,\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\",\"user\":\"root\",\"password\":\"IUAjJCVeJipyb290\"}\",\n" +
" \"udfFuncTenantCodeMap\":null\n" +
" },\n" +
" \"dataxTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"dependenceTaskExecutionContext\":null,\n" +
" \"sqoopTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"procedureTaskExecutionContext\":{\n" +
" \"connectionParams\":null\n" +
" }\n" +
"}" ;
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
assertTrue(taskExecutionContext != null);
}
}
```
error info:
```
Connected to the target VM, address: '127.0.0.1:53429', transport: 'socket'
19:43:31.405 [main] ERROR org.apache.dolphinscheduler.common.utils.JSONUtils - parse object exception!
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
at [Source: (String)"{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx"[truncated 1787 chars]; line: 1, column: 1]
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.reportBadDefinition(DeserializationContext.java:1452)
at com.fasterxml.jackson.databind.deser.DeserializerCache._handleUnknownKeyDeserializer(DeserializerCache.java:599)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findKeyDeserializer(DeserializerCache.java:168)
at com.fasterxml.jackson.databind.DeserializationContext.findKeyDeserializer(DeserializationContext.java:500)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.createContextual(MapDeserializer.java:248)
at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:484)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findNonContextualValueDeserializer(DeserializationContext.java:467)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:473)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477)
at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4190)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4009)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3004)
at org.apache.dolphinscheduler.common.utils.JSONUtils.parseObject(JSONUtils.java:108)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:71)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
Disconnected from the target VM, address: '127.0.0.1:53429', transport: 'socket'
java.lang.AssertionError
at org.junit.Assert.fail(Assert.java:86)
at org.junit.Assert.assertTrue(Assert.java:41)
at org.junit.Assert.assertTrue(Assert.java:52)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:73)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
```
| https://github.com/apache/dolphinscheduler/issues/3240 | https://github.com/apache/dolphinscheduler/pull/3245 | 252abbdaed4a120b0bc5c23da3e956e3d429a7a1 | 6f2667bf1aab2715cfd734a5a694d2b2fd0adcc9 | "2020-07-18T11:39:43Z" | java | "2020-07-21T10:54:07Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.entity;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import java.io.Serializable;
import java.util.Map;
/**
* SQL Task ExecutionContext
*/
public class SQLTaskExecutionContext implements Serializable {
/**
* warningGroupId
*/
private int warningGroupId;
/**
* connectionParams
*/
private String connectionParams;
/**
* udf function tenant code map
*/
private Map<UdfFunc,String> udfFuncTenantCodeMap;
public int getWarningGroupId() {
return warningGroupId;
}
public void setWarningGroupId(int warningGroupId) {
this.warningGroupId = warningGroupId;
}
public Map<UdfFunc, String> getUdfFuncTenantCodeMap() {
return udfFuncTenantCodeMap;
}
public void setUdfFuncTenantCodeMap(Map<UdfFunc, String> udfFuncTenantCodeMap) {
this.udfFuncTenantCodeMap = udfFuncTenantCodeMap;
}
public String getConnectionParams() {
return connectionParams;
}
public void setConnectionParams(String connectionParams) {
this.connectionParams = connectionParams;
}
@Override
public String toString() {
return "SQLTaskExecutionContext{" +
"warningGroupId=" + warningGroupId +
", connectionParams='" + connectionParams + '\'' +
", udfFuncTenantCodeMap=" + udfFuncTenantCodeMap +
'}';
}
}
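/*
 * Hedged sketch: the "Cannot find a (Map) Key deserializer" failure reported
 * above originates from the Map<UdfFunc, String> field in this class. Besides
 * registering a module on the ObjectMapper, an annotation-based alternative
 * (deserializer name illustrative) would be:
 *
 *   @JsonDeserialize(keyUsing = UdfFuncKeyDeserializer.class)
 *   private Map<UdfFunc, String> udfFuncTenantCodeMap;
 *
 * with @JsonDeserialize imported from
 * com.fasterxml.jackson.databind.annotation.JsonDeserialize.
 */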
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,240 | [Bug][worker] Cannot find a (Map) Key deserializer for type | When I execute a SQL query task, JSON parsing fails with the error below
error info:
```
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: 
Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
```
![image](https://user-images.githubusercontent.com/39816903/87851699-ecb1ad80-c92d-11ea-8c3d-820f38f5d68d.png)
show code:
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
![image](https://user-images.githubusercontent.com/39816903/87851753-69448c00-c92e-11ea-94e0-36b45135e533.png)
contextJson:
```
{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx@163.com","receiversCc":"","showType":"TABLE","localParams":[],"connParams":"","preStatements":[],"postStatements":[]},"preTasks":[],"extras":null,"depList":[],"dependence":{},"conditionResult":{"successNode":[""],"failedNode":[""]},"taskInstancePriority":"MEDIUM","workerGroup":"default","workerGroupId":null,"timeout":{"strategy":"","interval":null,"enable":false},"conditionsTask":false,"forbidden":false,"taskTimeoutParameter":{"enable":false,"strategy":null,"interval":0}}",
"processId":0,
"appIds":null,
"processInstanceId":2,
"scheduleTime":null,
"globalParams":null,
"executorId":2,
"cmdTypeIfComplement":0,
"tenantCode":"sysadmin",
"queue":"default",
"processDefineId":17,
"projectId":2,
"taskParams":null,
"envFile":null,
"definedParams":null,
"taskAppId":null,
"taskTimeoutStrategy":0,
"taskTimeout":0,
"workerGroup":"default",
"resources":{
},
"sqlTaskExecutionContext":{
"warningGroupId":0,
"connectionParams":"{"type":null,"address":"jdbc:mysql://127.0.0.1:3306","database":"test","jdbcUrl":"jdbc:mysql://127.0.0.1:3306/test","user":"root","password":"IUAjJCVeJipyb290"}",
"udfFuncTenantCodeMap":null
},
"dataxTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"dependenceTaskExecutionContext":null,
"sqoopTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"procedureTaskExecutionContext":{
"connectionParams":null
}
}
```
test unit:
```
package org.apache.dolphinscheduler.server.worker.processor;

import static org.junit.Assert.assertTrue;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.junit.Test;

public class TaskExecuteProcessorTest {
@Test
public void testJson(){
String contextJson = "{\n" +
" \"taskInstanceId\":2,\n" +
" \"taskName\":\"SQL-QUERY\",\n" +
" \"startTime\":\"2020-07-18 19:18:58\",\n" +
" \"taskType\":\"SQL\",\n" +
" \"host\":null,\n" +
" \"executePath\":\"/tmp/dolphinscheduler/exec/process/2/17/2/2\",\n" +
" \"logPath\":null,\n" +
" \"taskJson\":\"{\"id\":\"tasks-5736\",\"name\":\"SQL-QUERY\",\"desc\":null,\"type\":\"SQL\",\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"params\":{\"type\":\"MYSQL\",\"datasource\":3,\"sql\":\"SELECT * FROM person\",\"udfs\":\"\",\"sqlType\":\"0\",\"title\":\"SQL-QUERY\",\"receivers\":\"zhangboyi_mx@163.com\",\"receiversCc\":\"\",\"showType\":\"TABLE\",\"localParams\":[],\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"conditionsTask\":false,\"forbidden\":false,\"taskTimeoutParameter\":{\"enable\":false,\"strategy\":null,\"interval\":0}}\",\n" +
" \"processId\":0,\n" +
" \"appIds\":null,\n" +
" \"processInstanceId\":2,\n" +
" \"scheduleTime\":null,\n" +
" \"globalParams\":null,\n" +
" \"executorId\":2,\n" +
" \"cmdTypeIfComplement\":0,\n" +
" \"tenantCode\":\"sysadmin\",\n" +
" \"queue\":\"default\",\n" +
" \"processDefineId\":17,\n" +
" \"projectId\":2,\n" +
" \"taskParams\":null,\n" +
" \"envFile\":null,\n" +
" \"definedParams\":null,\n" +
" \"taskAppId\":null,\n" +
" \"taskTimeoutStrategy\":0,\n" +
" \"taskTimeout\":0,\n" +
" \"workerGroup\":\"default\",\n" +
" \"resources\":{\n" +
"\n" +
" },\n" +
" \"sqlTaskExecutionContext\":{\n" +
" \"warningGroupId\":0,\n" +
" \"connectionParams\":\"{\"type\":null,\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\",\"user\":\"root\",\"password\":\"IUAjJCVeJipyb290\"}\",\n" +
" \"udfFuncTenantCodeMap\":null\n" +
" },\n" +
" \"dataxTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"dependenceTaskExecutionContext\":null,\n" +
" \"sqoopTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"procedureTaskExecutionContext\":{\n" +
" \"connectionParams\":null\n" +
" }\n" +
"}" ;
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
assertTrue(taskExecutionContext != null);
}
}
```
error info:
```
Connected to the target VM, address: '127.0.0.1:53429', transport: 'socket'
19:43:31.405 [main] ERROR org.apache.dolphinscheduler.common.utils.JSONUtils - parse object exception!
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
at [Source: (String)"{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx"[truncated 1787 chars]; line: 1, column: 1]
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.reportBadDefinition(DeserializationContext.java:1452)
at com.fasterxml.jackson.databind.deser.DeserializerCache._handleUnknownKeyDeserializer(DeserializerCache.java:599)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findKeyDeserializer(DeserializerCache.java:168)
at com.fasterxml.jackson.databind.DeserializationContext.findKeyDeserializer(DeserializationContext.java:500)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.createContextual(MapDeserializer.java:248)
at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:484)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findNonContextualValueDeserializer(DeserializationContext.java:467)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:473)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477)
at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4190)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4009)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3004)
at org.apache.dolphinscheduler.common.utils.JSONUtils.parseObject(JSONUtils.java:108)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:71)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
Disconnected from the target VM, address: '127.0.0.1:53429', transport: 'socket'
java.lang.AssertionError
at org.junit.Assert.fail(Assert.java:86)
at org.junit.Assert.assertTrue(Assert.java:41)
at org.junit.Assert.assertTrue(Assert.java:52)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:73)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
```
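Note that the failing payload carries `"udfFuncTenantCodeMap":null`, yet parsing still throws: per the stack trace, Jackson resolves the key deserializer in `MapDeserializer.createContextual` while building the bean deserializer, before any value is read. A regression test can therefore be as small as the sketch below (it assumes the classes shown earlier; with the bug present, `JSONUtils.parseObject` logs the error and returns null):

```java
import static org.junit.Assert.assertNotNull;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext;
import org.junit.Test;

public class SQLTaskExecutionContextParseTest {

    @Test
    public void parseSucceedsEvenWithNullUdfFuncMap() {
        String json = "{\"warningGroupId\":0,"
                + "\"connectionParams\":null,"
                + "\"udfFuncTenantCodeMap\":null}";
        SQLTaskExecutionContext context = JSONUtils.parseObject(json, SQLTaskExecutionContext.class);
        assertNotNull(context);
    }
}
```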
| https://github.com/apache/dolphinscheduler/issues/3240 | https://github.com/apache/dolphinscheduler/pull/3245 | 252abbdaed4a120b0bc5c23da3e956e3d429a7a1 | 6f2667bf1aab2715cfd734a5a694d2b2fd0adcc9 | "2020-07-18T11:39:43Z" | java | "2020-07-21T10:54:07Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/entity/SQLTaskExecutionContextTest.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,240 | [Bug][worker] Cannot find a (Map) Key deserializer for type | When I execute a SQL query task, JSON parsing fails with the error below
error info:
```
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: 
Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
```
![image](https://user-images.githubusercontent.com/39816903/87851699-ecb1ad80-c92d-11ea-8c3d-820f38f5d68d.png)
show code:
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
![image](https://user-images.githubusercontent.com/39816903/87851753-69448c00-c92e-11ea-94e0-36b45135e533.png)
contextJson:
```
{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx@163.com","receiversCc":"","showType":"TABLE","localParams":[],"connParams":"","preStatements":[],"postStatements":[]},"preTasks":[],"extras":null,"depList":[],"dependence":{},"conditionResult":{"successNode":[""],"failedNode":[""]},"taskInstancePriority":"MEDIUM","workerGroup":"default","workerGroupId":null,"timeout":{"strategy":"","interval":null,"enable":false},"conditionsTask":false,"forbidden":false,"taskTimeoutParameter":{"enable":false,"strategy":null,"interval":0}}",
"processId":0,
"appIds":null,
"processInstanceId":2,
"scheduleTime":null,
"globalParams":null,
"executorId":2,
"cmdTypeIfComplement":0,
"tenantCode":"sysadmin",
"queue":"default",
"processDefineId":17,
"projectId":2,
"taskParams":null,
"envFile":null,
"definedParams":null,
"taskAppId":null,
"taskTimeoutStrategy":0,
"taskTimeout":0,
"workerGroup":"default",
"resources":{
},
"sqlTaskExecutionContext":{
"warningGroupId":0,
"connectionParams":"{"type":null,"address":"jdbc:mysql://127.0.0.1:3306","database":"test","jdbcUrl":"jdbc:mysql://127.0.0.1:3306/test","user":"root","password":"IUAjJCVeJipyb290"}",
"udfFuncTenantCodeMap":null
},
"dataxTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"dependenceTaskExecutionContext":null,
"sqoopTaskExecutionContext":{
"dataSourceId":0,
"sourcetype":0,
"sourceConnectionParams":null,
"dataTargetId":0,
"targetType":0,
"targetConnectionParams":null
},
"procedureTaskExecutionContext":{
"connectionParams":null
}
}
```
test unit:
```
package org.apache.dolphinscheduler.server.worker.processor;

import static org.junit.Assert.assertTrue;

import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.junit.Test;

public class TaskExecuteProcessorTest {
@Test
public void testJson(){
String contextJson = "{\n" +
" \"taskInstanceId\":2,\n" +
" \"taskName\":\"SQL-QUERY\",\n" +
" \"startTime\":\"2020-07-18 19:18:58\",\n" +
" \"taskType\":\"SQL\",\n" +
" \"host\":null,\n" +
" \"executePath\":\"/tmp/dolphinscheduler/exec/process/2/17/2/2\",\n" +
" \"logPath\":null,\n" +
" \"taskJson\":\"{\"id\":\"tasks-5736\",\"name\":\"SQL-QUERY\",\"desc\":null,\"type\":\"SQL\",\"runFlag\":\"NORMAL\",\"loc\":null,\"maxRetryTimes\":0,\"retryInterval\":1,\"params\":{\"type\":\"MYSQL\",\"datasource\":3,\"sql\":\"SELECT * FROM person\",\"udfs\":\"\",\"sqlType\":\"0\",\"title\":\"SQL-QUERY\",\"receivers\":\"zhangboyi_mx@163.com\",\"receiversCc\":\"\",\"showType\":\"TABLE\",\"localParams\":[],\"connParams\":\"\",\"preStatements\":[],\"postStatements\":[]},\"preTasks\":[],\"extras\":null,\"depList\":[],\"dependence\":{},\"conditionResult\":{\"successNode\":[\"\"],\"failedNode\":[\"\"]},\"taskInstancePriority\":\"MEDIUM\",\"workerGroup\":\"default\",\"workerGroupId\":null,\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"conditionsTask\":false,\"forbidden\":false,\"taskTimeoutParameter\":{\"enable\":false,\"strategy\":null,\"interval\":0}}\",\n" +
" \"processId\":0,\n" +
" \"appIds\":null,\n" +
" \"processInstanceId\":2,\n" +
" \"scheduleTime\":null,\n" +
" \"globalParams\":null,\n" +
" \"executorId\":2,\n" +
" \"cmdTypeIfComplement\":0,\n" +
" \"tenantCode\":\"sysadmin\",\n" +
" \"queue\":\"default\",\n" +
" \"processDefineId\":17,\n" +
" \"projectId\":2,\n" +
" \"taskParams\":null,\n" +
" \"envFile\":null,\n" +
" \"definedParams\":null,\n" +
" \"taskAppId\":null,\n" +
" \"taskTimeoutStrategy\":0,\n" +
" \"taskTimeout\":0,\n" +
" \"workerGroup\":\"default\",\n" +
" \"resources\":{\n" +
"\n" +
" },\n" +
" \"sqlTaskExecutionContext\":{\n" +
" \"warningGroupId\":0,\n" +
" \"connectionParams\":\"{\"type\":null,\"address\":\"jdbc:mysql://127.0.0.1:3306\",\"database\":\"test\",\"jdbcUrl\":\"jdbc:mysql://127.0.0.1:3306/test\",\"user\":\"root\",\"password\":\"IUAjJCVeJipyb290\"}\",\n" +
" \"udfFuncTenantCodeMap\":null\n" +
" },\n" +
" \"dataxTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"dependenceTaskExecutionContext\":null,\n" +
" \"sqoopTaskExecutionContext\":{\n" +
" \"dataSourceId\":0,\n" +
" \"sourcetype\":0,\n" +
" \"sourceConnectionParams\":null,\n" +
" \"dataTargetId\":0,\n" +
" \"targetType\":0,\n" +
" \"targetConnectionParams\":null\n" +
" },\n" +
" \"procedureTaskExecutionContext\":{\n" +
" \"connectionParams\":null\n" +
" }\n" +
"}" ;
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
assertTrue(taskExecutionContext != null);
}
}
```
error info:
```
Connected to the target VM, address: '127.0.0.1:53429', transport: 'socket'
19:43:31.405 [main] ERROR org.apache.dolphinscheduler.common.utils.JSONUtils - parse object exception!
com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot find a (Map) Key deserializer for type [simple type, class org.apache.dolphinscheduler.dao.entity.UdfFunc]
at [Source: (String)"{
"taskInstanceId":2,
"taskName":"SQL-QUERY",
"startTime":"2020-07-18 19:18:58",
"taskType":"SQL",
"host":null,
"executePath":"/tmp/dolphinscheduler/exec/process/2/17/2/2",
"logPath":null,
"taskJson":"{"id":"tasks-5736","name":"SQL-QUERY","desc":null,"type":"SQL","runFlag":"NORMAL","loc":null,"maxRetryTimes":0,"retryInterval":1,"params":{"type":"MYSQL","datasource":3,"sql":"SELECT * FROM person","udfs":"","sqlType":"0","title":"SQL-QUERY","receivers":"zhangboyi_mx"[truncated 1787 chars]; line: 1, column: 1]
at com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67)
at com.fasterxml.jackson.databind.DeserializationContext.reportBadDefinition(DeserializationContext.java:1452)
at com.fasterxml.jackson.databind.deser.DeserializerCache._handleUnknownKeyDeserializer(DeserializerCache.java:599)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findKeyDeserializer(DeserializerCache.java:168)
at com.fasterxml.jackson.databind.DeserializationContext.findKeyDeserializer(DeserializationContext.java:500)
at com.fasterxml.jackson.databind.deser.std.MapDeserializer.createContextual(MapDeserializer.java:248)
at com.fasterxml.jackson.databind.DeserializationContext.handlePrimaryContextualization(DeserializationContext.java:651)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:484)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findNonContextualValueDeserializer(DeserializationContext.java:467)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.resolve(BeanDeserializerBase.java:473)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCache2(DeserializerCache.java:293)
at com.fasterxml.jackson.databind.deser.DeserializerCache._createAndCacheValueDeserializer(DeserializerCache.java:244)
at com.fasterxml.jackson.databind.deser.DeserializerCache.findValueDeserializer(DeserializerCache.java:142)
at com.fasterxml.jackson.databind.DeserializationContext.findRootValueDeserializer(DeserializationContext.java:477)
at com.fasterxml.jackson.databind.ObjectMapper._findRootDeserializer(ObjectMapper.java:4190)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4009)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3004)
at org.apache.dolphinscheduler.common.utils.JSONUtils.parseObject(JSONUtils.java:108)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:71)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
Disconnected from the target VM, address: '127.0.0.1:53429', transport: 'socket'
java.lang.AssertionError
at org.junit.Assert.fail(Assert.java:86)
at org.junit.Assert.assertTrue(Assert.java:41)
at org.junit.Assert.assertTrue(Assert.java:52)
at org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessorTest.testJson(TaskExecuteProcessorTest.java:73)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
```
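The trace above has the classic shape of a Jackson root-deserializer failure: `readValue` cannot construct the target type, and the later `assertTrue` failure suggests `JSONUtils.parseObject` handed back `null`. As a rough, self-contained illustration of the requirement involved — the `TaskRequest` class and its fields below are hypothetical stand-ins, not the project's actual command class:

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class JsonRoundTripSketch {

    // Hypothetical DTO: Jackson's default deserializer needs a no-arg
    // constructor (or an @JsonCreator) to build the root value; without
    // one, findRootValueDeserializer fails much like the trace above.
    public static class TaskRequest {
        private int taskInstanceId;
        private String taskName;

        public TaskRequest() { } // required by the default deserializer

        public int getTaskInstanceId() { return taskInstanceId; }
        public void setTaskInstanceId(int taskInstanceId) { this.taskInstanceId = taskInstanceId; }
        public String getTaskName() { return taskName; }
        public void setTaskName(String taskName) { this.taskName = taskName; }
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        String json = "{\"taskInstanceId\":1,\"taskName\":\"shell-1\"}";
        TaskRequest request = mapper.readValue(json, TaskRequest.class);
        System.out.println(request != null && request.getTaskInstanceId() == 1);
    }
}
```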
| https://github.com/apache/dolphinscheduler/issues/3240 | https://github.com/apache/dolphinscheduler/pull/3245 | 252abbdaed4a120b0bc5c23da3e956e3d429a7a1 | 6f2667bf1aab2715cfd734a5a694d2b2fd0adcc9 | "2020-07-18T11:39:43Z" | java | "2020-07-21T10:54:07Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed and easy-to-extend visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing and making the scheduling system available out of the box for
data processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.5.RELEASE</spring.version>
<spring.boot.version>2.1.3.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.2.3</quartz.version>
<jackson.version>2.9.8</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.14</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.6</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>20.0</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<jsp-2.1.version>6.1.14</jsp-2.1.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<apache.rat.version>0.13</apache.rat.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
<rpm-maven-plugin.version>2.2.0</rpm-maven-plugin.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<guava-retry.version>2.0.0</guava-retry.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-plugin-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<!-- for api module -->
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>${jsp-2.1.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<version>${rpm-maven-plugin.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<includes>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/template/AlertTemplateFactoryTest.java</include>
<include>**/alert/template/impl/DefaultHTMLTemplateTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/ExcelUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/utils/JSONUtilsTest.java</include>
<!--<include>**/alert/utils/MailUtilsTest.java</include>-->
<include>**/alert/plugin/EmailAlertPluginTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
<include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/PasswordAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<!--<include>**/api/service/DataSourceServiceTest.java</include>-->
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/process/ProcessBuilderForWin32Test.java</include>
<include>**/common/utils/process/ProcessEnvironmentForWin32Test.java</include>
<include>**/common/utils/process/ProcessImplForWin32Test.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<!--<include>**/remote/NettyRemotingClientTest.java</include>-->
<include>**/remote/ResponseFutureTest.java</include>
<!--<include>**/server/log/LoggerServerTest.java</include>-->
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<!--<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>-->
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<!--<include>**/server/master/register/MasterRegistryTest.java</include>-->
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<!--<include>**/server/master/ConditionsTaskTest.java</include>-->
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<!--<include>**/server/register/ZookeeperNodeManagerTest.java</include>-->
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<!--<include>**/server/utils/FlinkArgsUtilsTest.java</include>-->
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<!--<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>-->
<include>**/server/worker/registry/WorkerRegistryTest.java</include>
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserAlertGroupMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/datasource/HiveDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/model/AlertDataTest.java</include>
<include>**/plugin/model/AlertInfoTest.java</include>
<include>**/plugin/utils/PropertyUtilsTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache.rat.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<addDefaultLicenseMatchers>false</addDefaultLicenseMatchers>
<licenses>
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL20</licenseFamilyCategory>
<licenseFamilyName>Apache License, 2.0</licenseFamilyName>
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF)</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License, 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>**/node_modules/**</exclude>
<exclude>**/node/**</exclude>
<exclude>**/dist/**</exclude>
<exclude>**/licenses/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/sql/soft_version</exclude>
<exclude>**/common/utils/ScriptRunner.java</exclude>
<exclude>**/*.json</exclude>
<!-- document files -->
<exclude>**/*.md</exclude>
<exclude>**/*.MD</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/docs/**</exclude>
<exclude>**/*.babelrc</exclude>
<exclude>**/*.eslintrc</exclude>
<exclude>**/.mvn/jvm.config</exclude>
<exclude>**/.mvn/wrapper/**</exclude>
</excludes>
<consoleOutput>true</consoleOutput>
</configuration>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-plugin-api</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 1,336 | [BUG] [api]Dependent execution fails after task performs serial complement |
**To Reproduce**
1. Task A depends on task B, with the dependent range set to the last 3 days
2. Run a serial complement of the last 3 days for task B
3. Run the workflow: the dependent execution always fails
![image](https://user-images.githubusercontent.com/55787491/69607569-74297400-1060-11ea-9d15-8175e79056ff.png)
**Which version of Dolphin Scheduler:**
-[1.2.0]
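For orientation, a minimal sketch of the kind of window check a "last 3 days" dependent item performs — `InstanceStore`, `hasSuccessfulRunOn`, and the helper names are hypothetical stand-ins, not the scheduler's actual API. A serial complement writes one upstream run per scheduled day, so each day in the window should be satisfiable once the complement finishes:

```java
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;

public class DependentWindowSketch {

    interface InstanceStore { // stand-in for the process instance lookup
        boolean hasSuccessfulRunOn(int processDefinitionId, LocalDate day);
    }

    // the "last 3 days" window relative to the business date
    static List<LocalDate> lastThreeDays(LocalDate businessDate) {
        List<LocalDate> window = new ArrayList<>();
        for (int offset = 1; offset <= 3; offset++) {
            window.add(businessDate.minusDays(offset));
        }
        return window;
    }

    // the dependent item passes only if every day in the window has a
    // successful run of the upstream definition
    static boolean dependSuccess(InstanceStore store, int upstreamId, LocalDate businessDate) {
        for (LocalDate day : lastThreeDays(businessDate)) {
            if (!store.hasSuccessfulRunOn(upstreamId, day)) {
                return false; // one missing day fails the whole check
            }
        }
        return true;
    }
}
```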
| https://github.com/apache/dolphinscheduler/issues/1336 | https://github.com/apache/dolphinscheduler/pull/3331 | 624f0aeab9621abbbc9584c4a3167cb1bd2dcf00 | 5f725aa28b2edbd246480a554ac7822bf07fbbfd | "2019-11-26T07:23:55Z" | java | "2020-07-29T07:37:49Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.runner;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.utils.AlertManager;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* master exec thread,split dag
*/
public class MasterExecThread implements Runnable {
/**
* logger of MasterExecThread
*/
private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class);
/**
* process instance
*/
private ProcessInstance processInstance;
/**
* running task node exec threads and their futures
*/
private final Map<MasterBaseTaskExecThread,Future<Boolean>> activeTaskNode = new ConcurrentHashMap<>();
/**
* task exec service
*/
private final ExecutorService taskExecService;
/**
* submit failure nodes
*/
private boolean taskFailedSubmit = false;
/**
* recover node id list
*/
private List<TaskInstance> recoverNodeIdList = new ArrayList<>();
/**
* error task list
*/
private Map<String,TaskInstance> errorTaskList = new ConcurrentHashMap<>();
/**
* complete task list
*/
private Map<String, TaskInstance> completeTaskList = new ConcurrentHashMap<>();
/**
* ready to submit task list
*/
private Map<String, TaskInstance> readyToSubmitTaskList = new ConcurrentHashMap<>();
/**
* depend failed task map
*/
private Map<String, TaskInstance> dependFailedTask = new ConcurrentHashMap<>();
/**
* forbidden task map
*/
private Map<String, TaskNode> forbiddenTaskList = new ConcurrentHashMap<>();
/**
* skip task map
*/
private Map<String, TaskNode> skipTaskNodeList = new ConcurrentHashMap<>();
/**
* recover tolerance fault task list
*/
private List<TaskInstance> recoverToleranceFaultTaskList = new ArrayList<>();
/**
* alert manager
*/
private AlertManager alertManager;
/**
* the object of DAG
*/
private DAG<String, TaskNode, TaskNodeRelation> dag;
/**
* process service
*/
private ProcessService processService;
/**
* master config
*/
private MasterConfig masterConfig;
/**
* netty remoting client
*/
private NettyRemotingClient nettyRemotingClient;
/**
* constructor of MasterExecThread
* @param processInstance processInstance
* @param processService processService
* @param nettyRemotingClient nettyRemotingClient
*/
public MasterExecThread(ProcessInstance processInstance
, ProcessService processService
, NettyRemotingClient nettyRemotingClient
, AlertManager alertManager
, MasterConfig masterConfig) {
this.processService = processService;
this.processInstance = processInstance;
this.masterConfig = masterConfig;
int masterTaskExecNum = masterConfig.getMasterExecTaskNum();
this.taskExecService = ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread",
masterTaskExecNum);
this.nettyRemotingClient = nettyRemotingClient;
this.alertManager = alertManager;
}
@Override
public void run() {
// process instance is null
if (processInstance == null){
logger.info("process instance does not exist");
return;
}
// check to see if it's done
if (processInstance.getState().typeIsFinished()){
logger.info("process instance is done : {}",processInstance.getId());
return;
}
try {
if (processInstance.isComplementData() && Flag.NO == processInstance.getIsSubProcess()){
// sub process complement data
executeComplementProcess();
}else{
// execute flow
executeProcess();
}
}catch (Exception e){
logger.error("master exec thread exception", e);
logger.error("process execute failed, process id:{}", processInstance.getId());
processInstance.setState(ExecutionStatus.FAILURE);
processInstance.setEndTime(new Date());
processService.updateProcessInstance(processInstance);
}finally {
taskExecService.shutdown();
// post handle
postHandle();
}
}
/**
* execute process
* @throws Exception exception
*/
private void executeProcess() throws Exception {
prepareProcess();
runProcess();
endProcess();
}
/**
* execute complement process
* @throws Exception exception
*/
private void executeComplementProcess() throws Exception {
Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam());
Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
processService.saveProcessInstance(processInstance);
// get schedules
int processDefinitionId = processInstance.getProcessDefinitionId();
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
List<Date> listDate = Lists.newLinkedList();
if(!CollectionUtils.isEmpty(schedules)){
for (Schedule schedule : schedules) {
listDate.addAll(CronUtils.getSelfFireDateList(startDate, endDate, schedule.getCrontab()));
}
}
// get first fire date
Iterator<Date> iterator = null;
Date scheduleDate = null;
if(!CollectionUtils.isEmpty(listDate)) {
iterator = listDate.iterator();
scheduleDate = iterator.next();
processInstance.setScheduleTime(scheduleDate);
processService.updateProcessInstance(processInstance);
}else{
scheduleDate = processInstance.getScheduleTime();
if(scheduleDate == null){
scheduleDate = startDate;
}
}
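// each iteration below runs the whole DAG once for the current schedule
// date; on success the instance id is reset to 0 and saved again, so the
// next date executes as a fresh process instance run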
while(Stopper.isRunning()){
logger.info("process {} start to complement {} data",
processInstance.getId(), DateUtils.dateToString(scheduleDate));
// prepare dag and other info
prepareProcess();
if(dag == null){
logger.error("process {} dag is null, please check out parameters",
processInstance.getId());
processInstance.setState(ExecutionStatus.SUCCESS);
processService.updateProcessInstance(processInstance);
return;
}
// execute process ,waiting for end
runProcess();
endProcess();
// process instance failure ,no more complements
if(!processInstance.getState().typeIsSuccess()){
logger.info("process {} state {}, complement did not complete fully!",
processInstance.getId(), processInstance.getState());
break;
}
// current process instance success ,next execute
if(null == iterator){
// loop by day
scheduleDate = DateUtils.getSomeDay(scheduleDate, 1);
if(scheduleDate.after(endDate)){
// all success
logger.info("process {} complement completed!", processInstance.getId());
break;
}
}else{
// loop by schedule date
if(!iterator.hasNext()){
// all success
logger.info("process {} complement completed!", processInstance.getId());
break;
}
scheduleDate = iterator.next();
}
// flow end
// execute next process instance complement data
processInstance.setScheduleTime(scheduleDate);
if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
}
processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processInstance.getProcessDefinition().getGlobalParamMap(),
processInstance.getProcessDefinition().getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
processInstance.setId(0);
processService.saveProcessInstance(processInstance);
}
}
/**
* prepare process parameter
* @throws Exception exception
*/
private void prepareProcess() throws Exception {
// init task queue
initTaskQueue();
// gen process dag
buildFlowDag();
logger.info("prepare process {} end", processInstance.getId());
}
/**
* process end handle
*/
private void endProcess() {
processInstance.setEndTime(new Date());
processService.updateProcessInstance(processInstance);
if(processInstance.getState().typeIsWaitingThread()){
processService.createRecoveryWaitingThreadCommand(null, processInstance);
}
List<TaskInstance> taskInstances = processService.findValidTaskListByProcessId(processInstance.getId());
alertManager.sendAlertProcessInstance(processInstance, taskInstances);
}
/**
* generate process dag
* @throws Exception exception
*/
private void buildFlowDag() throws Exception {
recoverNodeIdList = getStartTaskInstanceList(processInstance.getCommandParam());
forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(processInstance.getProcessInstanceJson());
// generate process to get DAG info
List<String> recoveryNameList = getRecoveryNodeNameList();
List<String> startNodeNameList = parseStartNodeName(processInstance.getCommandParam());
ProcessDag processDag = generateFlowDag(processInstance.getProcessInstanceJson(),
startNodeNameList, recoveryNameList, processInstance.getTaskDependType());
if(processDag == null){
logger.error("processDag is null");
return;
}
// generate process dag
dag = DagHelper.buildDagGraph(processDag);
}
/**
* init task queue
*/
private void initTaskQueue(){
taskFailedSubmit = false;
activeTaskNode.clear();
dependFailedTask.clear();
completeTaskList.clear();
errorTaskList.clear();
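// preload the latest valid task instances so reruns and fault-tolerant
// restarts can skip tasks that already completed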
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance task : taskInstanceList){
if(task.isTaskComplete()){
completeTaskList.put(task.getName(), task);
}
if(task.getState().typeIsFailure() && !task.taskCanRetry()){
errorTaskList.put(task.getName(), task);
}
}
}
/**
* process post handle
*/
private void postHandle() {
logger.info("develop mode is: {}", CommonUtils.isDevelopMode());
if (!CommonUtils.isDevelopMode()) {
// get exec dir
String execLocalPath = org.apache.dolphinscheduler.common.utils.FileUtils
.getProcessExecDir(processInstance.getProcessDefinition().getProjectId(),
processInstance.getProcessDefinitionId(),
processInstance.getId());
try {
FileUtils.deleteDirectory(new File(execLocalPath));
} catch (IOException e) {
logger.error("delete exec dir failed ", e);
}
}
}
/**
* submit task to execute
* @param taskInstance task instance
* @return TaskInstance
*/
private TaskInstance submitTaskExec(TaskInstance taskInstance) {
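// choose the exec thread type by task type; each thread runs
// asynchronously on the master's fixed-size task exec service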
MasterBaseTaskExecThread abstractExecThread = null;
if(taskInstance.isSubProcess()){
abstractExecThread = new SubProcessTaskExecThread(taskInstance);
}else if(taskInstance.isDependTask()){
abstractExecThread = new DependentTaskExecThread(taskInstance);
}else if(taskInstance.isConditionsTask()){
abstractExecThread = new ConditionsTaskExecThread(taskInstance);
}else {
abstractExecThread = new MasterTaskExecThread(taskInstance);
}
Future<Boolean> future = taskExecService.submit(abstractExecThread);
activeTaskNode.putIfAbsent(abstractExecThread, future);
return abstractExecThread.getTaskInstance();
}
/**
* find the task instance in db,
* in case the same-named task is submitted more than once at the same time.
* @param taskName task name
* @return TaskInstance
*/
private TaskInstance findTaskIfExists(String taskName){
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(this.processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
if(taskInstance.getName().equals(taskName)){
return taskInstance;
}
}
return null;
}
/**
* create a task instance for the node, or return the existing one
* @param processInstance process instance
* @param nodeName node name
* @return TaskInstance
*/
private TaskInstance createTaskInstance(ProcessInstance processInstance, String nodeName,
TaskNode taskNode) {
TaskInstance taskInstance = findTaskIfExists(nodeName);
if(taskInstance == null){
taskInstance = new TaskInstance();
// task name
taskInstance.setName(nodeName);
// process instance define id
taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId());
// task instance state
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
// process instance id
taskInstance.setProcessInstanceId(processInstance.getId());
// task instance node json
taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode));
// task instance type
taskInstance.setTaskType(taskNode.getType());
// task instance whether alert
taskInstance.setAlertFlag(Flag.NO);
// task instance start time
taskInstance.setStartTime(new Date());
// task instance flag
taskInstance.setFlag(Flag.YES);
// task instance retry times
taskInstance.setRetryTimes(0);
// max task instance retry times
taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes());
// retry task instance interval
taskInstance.setRetryInterval(taskNode.getRetryInterval());
// task instance priority
if(taskNode.getTaskInstancePriority() == null){
taskInstance.setTaskInstancePriority(Priority.MEDIUM);
}else{
taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority());
}
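// worker group resolution: a task-level group wins unless it is the
// default group, in which case a non-default process-level group is inherited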
String processWorkerGroup = processInstance.getWorkerGroup();
processWorkerGroup = StringUtils.isBlank(processWorkerGroup) ? DEFAULT_WORKER_GROUP : processWorkerGroup;
String taskWorkerGroup = StringUtils.isBlank(taskNode.getWorkerGroup()) ? processWorkerGroup : taskNode.getWorkerGroup();
if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP) && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) {
taskInstance.setWorkerGroup(processWorkerGroup);
}else {
taskInstance.setWorkerGroup(taskWorkerGroup);
}
}
return taskInstance;
}
/**
* if all of the task's dependencies are skipped, skip it too.
* @param taskNode
* @return
*/
private boolean isTaskNodeNeedSkip(TaskNode taskNode){
if(CollectionUtils.isEmpty(taskNode.getDepList())){
return false;
}
for(String depNode : taskNode.getDepList()){
if(!skipTaskNodeList.containsKey(depNode)){
return false;
}
}
return true;
}
/**
* mark task nodes skipped when all of their dependencies are skipped
* @param taskNodesSkipList
*/
private void setTaskNodeSkip(List<String> taskNodesSkipList){
for(String skipNode : taskNodesSkipList){
skipTaskNodeList.putIfAbsent(skipNode, dag.getNode(skipNode));
Collection<String> postNodeList = DagHelper.getStartVertex(skipNode, dag, completeTaskList);
List<String> postSkipList = new ArrayList<>();
for(String post : postNodeList){
TaskNode postNode = dag.getNode(post);
if(isTaskNodeNeedSkip(postNode)){
postSkipList.add(post);
}
}
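// recurse: newly skipped nodes may make further downstream nodes skippable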
setTaskNodeSkip(postSkipList);
}
}
/**
* parse a conditions task and find the branch process to run,
* setting the skip flag on the other branch.
* @param nodeName
* @return
*/
private List<String> parseConditionTask(String nodeName){
List<String> conditionTaskList = new ArrayList<>();
TaskNode taskNode = dag.getNode(nodeName);
if(!taskNode.isConditionsTask()){
return conditionTaskList;
}
ConditionsParameters conditionsParameters =
JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class);
TaskInstance taskInstance = completeTaskList.get(nodeName);
if(taskInstance == null){
logger.error("cannot find task instance {}, please check it!", nodeName);
return conditionTaskList;
}
if(taskInstance.getState().typeIsSuccess()){
conditionTaskList = conditionsParameters.getSuccessNode();
setTaskNodeSkip(conditionsParameters.getFailedNode());
}else if(taskInstance.getState().typeIsFailure()){
conditionTaskList = conditionsParameters.getFailedNode();
setTaskNodeSkip(conditionsParameters.getSuccessNode());
}else{
conditionTaskList.add(nodeName);
}
return conditionTaskList;
}
/**
* parse the post node list of the previous node:
* for a conditions node, return the branch chosen by its settings;
* for a completed post node, return the post nodes of that completed node
* @param previousNodeName
* @return
*/
private List<String> parsePostNodeList(String previousNodeName){
List<String> postNodeList = new ArrayList<>();
TaskNode taskNode = dag.getNode(previousNodeName);
if(taskNode != null && taskNode.isConditionsTask()){
return parseConditionTask(previousNodeName);
}
Collection<String> postNodeCollection = DagHelper.getStartVertex(previousNodeName, dag, completeTaskList);
List<String> postSkipList = new ArrayList<>();
// skip nodes that already succeeded and parse their successors instead
// if it is a conditions node:
// 1. parse the branch process according to the conditions setting
// 2. set the skip flag on the other branch process
for(String postNode : postNodeCollection){
if(completeTaskList.containsKey(postNode)){
TaskInstance postTaskInstance = completeTaskList.get(postNode);
if(dag.getNode(postNode).isConditionsTask()){
List<String> conditionTaskNodeList = parseConditionTask(postNode);
for(String conditions : conditionTaskNodeList){
postNodeList.addAll(parsePostNodeList(conditions));
}
}else if(postTaskInstance.getState().typeIsSuccess()){
postNodeList.addAll(parsePostNodeList(postNode));
}else{
postNodeList.add(postNode);
}
}else if(isTaskNodeNeedSkip(dag.getNode(postNode))){
postSkipList.add(postNode);
setTaskNodeSkip(postSkipList);
postSkipList.clear();
}else{
postNodeList.add(postNode);
}
}
return postNodeList;
}
/**
* submit post node
* @param parentNodeName parent node name
*/
private void submitPostNode(String parentNodeName){
List<String> submitTaskNodeList = parsePostNodeList(parentNodeName);
List<TaskInstance> taskInstances = new ArrayList<>();
for(String taskNode : submitTaskNodeList){
taskInstances.add(createTaskInstance(processInstance, taskNode,
dag.getNode(taskNode)));
}
// if the previous node succeeded, submit the post nodes
for(TaskInstance task : taskInstances){
if(readyToSubmitTaskList.containsKey(task.getName())){
continue;
}
if(completeTaskList.containsKey(task.getName())){
logger.info("task {} has already run success", task.getName());
continue;
}
if(task.getState().typeIsPause() || task.getState().typeIsCancel()){
logger.info("task {} stopped, the state is {}", task.getName(), task.getState());
}else{
addTaskToStandByList(task);
}
}
}
/**
* determine whether the dependencies of the task node are complete
* @return DependResult
*/
private DependResult isTaskDepsComplete(String taskName) {
Collection<String> startNodes = dag.getBeginNode();
// a start vertex has no dependencies, so it succeeds directly
if(startNodes.contains(taskName)){
return DependResult.SUCCESS;
}
TaskNode taskNode = dag.getNode(taskName);
List<String> depNameList = taskNode.getDepList();
for(String depsNode : depNameList ){
if(!dag.containsNode(depsNode)
|| forbiddenTaskList.containsKey(depsNode)
|| skipTaskNodeList.containsKey(depsNode)){
continue;
}
// dependencies must be fully completed
if(!completeTaskList.containsKey(depsNode)){
return DependResult.WAITING;
}
ExecutionStatus depTaskState = completeTaskList.get(depsNode).getState();
// a failed dependency does not fail the chain when it feeds a conditions
// task or is a conditions task itself: the failure is a valid branch signal there.
if(depTaskState.typeIsFailure()
&& !DagHelper.haveConditionsAfterNode(depsNode, dag )
&& !dag.getNode(depsNode).isConditionsTask()){
return DependResult.FAILED;
}
if(depTaskState.typeIsPause() || depTaskState.typeIsCancel()){
return DependResult.WAITING;
}
}
logger.info("taskName: {} completeDependTaskList: {}", taskName, Arrays.toString(completeTaskList.keySet().toArray()));
return DependResult.SUCCESS;
}
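    // Illustrative example (hypothetical): for taskA -> cond where taskA FAILED
    // but is followed by a conditions node, the check above does not return
    // DependResult.FAILED for "cond"; the conditions node still runs and routes
    // execution to its failure branch instead of failing the whole dependency.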
/**
* query task instance by complete state
* @param state state
* @return task instance list
*/
private List<TaskInstance> getCompleteTaskByState(ExecutionStatus state){
List<TaskInstance> resultList = new ArrayList<>();
for (Map.Entry<String, TaskInstance> entry: completeTaskList.entrySet()) {
if(entry.getValue().getState() == state){
resultList.add(entry.getValue());
}
}
return resultList;
}
/**
     * compute the process state while there are ongoing tasks
* @param state state
* @return ExecutionStatus
*/
private ExecutionStatus runningState(ExecutionStatus state){
if(state == ExecutionStatus.READY_STOP ||
state == ExecutionStatus.READY_PAUSE ||
state == ExecutionStatus.WAITTING_THREAD){
// if the running task is not completed, the state remains unchanged
return state;
}else{
return ExecutionStatus.RUNNING_EXEUTION;
}
}
/**
     * whether a failed task exists: submit failure, dependency failure, or execution failure (after retries)
*
* @return Boolean whether has failed task
*/
private boolean hasFailedTask(){
if(this.taskFailedSubmit){
return true;
}
if(this.errorTaskList.size() > 0){
return true;
}
return this.dependFailedTask.size() > 0;
}
/**
* process instance failure
*
* @return Boolean whether process instance failed
*/
private boolean processFailed(){
if(hasFailedTask()) {
if(processInstance.getFailureStrategy() == FailureStrategy.END){
return true;
}
if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE) {
return readyToSubmitTaskList.size() == 0 || activeTaskNode.size() == 0;
}
}
return false;
}
/**
     * whether any task is waiting for a thread
* @return Boolean whether has waiting thread task
*/
private boolean hasWaitingThreadTask(){
List<TaskInstance> waitingList = getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD);
return CollectionUtils.isNotEmpty(waitingList);
}
/**
     * compute the state when the process is ready to pause
     * 1. a failed task awaiting retry exists in the standby queue: return FAILURE directly
     * 2. a paused task exists, the complement has not finished, or tasks are pending submission: return PAUSE
     * 3. otherwise: return SUCCESS
* @return ExecutionStatus
*/
private ExecutionStatus processReadyPause(){
if(hasRetryTaskInStandBy()){
return ExecutionStatus.FAILURE;
}
List<TaskInstance> pauseList = getCompleteTaskByState(ExecutionStatus.PAUSE);
if(CollectionUtils.isNotEmpty(pauseList)
|| !isComplementEnd()
|| readyToSubmitTaskList.size() > 0){
return ExecutionStatus.PAUSE;
}else{
return ExecutionStatus.SUCCESS;
}
}
/**
* generate the latest process instance status by the tasks state
* @return process instance execution status
*/
private ExecutionStatus getProcessInstanceState(){
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
ExecutionStatus state = instance.getState();
if(activeTaskNode.size() > 0 || retryTaskExists()){
        // active tasks or retry tasks exist
return runningState(state);
}
// process failure
if(processFailed()){
return ExecutionStatus.FAILURE;
}
// waiting thread
if(hasWaitingThreadTask()){
return ExecutionStatus.WAITTING_THREAD;
}
// pause
if(state == ExecutionStatus.READY_PAUSE){
return processReadyPause();
}
// stop
if(state == ExecutionStatus.READY_STOP){
List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP);
List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL);
if(CollectionUtils.isNotEmpty(stopList)
|| CollectionUtils.isNotEmpty(killList)
|| !isComplementEnd()){
return ExecutionStatus.STOP;
}else{
return ExecutionStatus.SUCCESS;
}
}
// success
if(state == ExecutionStatus.RUNNING_EXEUTION){
List<TaskInstance> killTasks = getCompleteTaskByState(ExecutionStatus.KILL);
if(readyToSubmitTaskList.size() > 0){
                // tasks are pending submission (waiting for dependencies to complete), keep running
return ExecutionStatus.RUNNING_EXEUTION;
}else if(CollectionUtils.isNotEmpty(killTasks)){
// tasks maybe killed manually
return ExecutionStatus.FAILURE;
}else{
// if the waiting queue is empty and the status is in progress, then success
return ExecutionStatus.SUCCESS;
}
}
return state;
}
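    // Decision order above, summarized as a descriptive note:
    //   active or retryable tasks            -> runningState(current state)
    //   failed task (per failure strategy)   -> FAILURE
    //   task waiting for a thread            -> WAITTING_THREAD
    //   READY_PAUSE                          -> processReadyPause()
    //   READY_STOP                           -> STOP or SUCCESS
    //   RUNNING with empty standby queue     -> SUCCESS, or FAILURE if tasks were killed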
/**
     * whether the standby task list contains retry tasks
     * @return true if a failed task awaiting retry exists
*/
    private boolean retryTaskExists() {
        for(TaskInstance task : readyToSubmitTaskList.values()){
            if(task.getState().typeIsFailure()){
                return true;
            }
        }
        return false;
    }
/**
* whether complement end
* @return Boolean whether is complement end
*/
private boolean isComplementEnd() {
if(!processInstance.isComplementData()){
return true;
}
try {
Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam());
Date endTime = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
return processInstance.getScheduleTime().equals(endTime);
} catch (Exception e) {
logger.error("complement end failed ",e);
return false;
}
}
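    // Worked example (hypothetical dates): for a complement over
    // 2020-01-01 .. 2020-01-03, cmdParam carries the end date 2020-01-03 under
    // CMDPARAM_COMPLEMENT_DATA_END_DATE; while scheduleTime is 2020-01-02 this
    // returns false, and it returns true on the final run when scheduleTime
    // equals 2020-01-03.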
/**
     * update the process instance state
     * after each batch of tasks is executed, the status of the process instance is refreshed
*/
private void updateProcessInstanceState() {
ExecutionStatus state = getProcessInstanceState();
if(processInstance.getState() != state){
logger.info(
"work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}",
processInstance.getId(), processInstance.getName(),
processInstance.getState(), state,
processInstance.getCommandType());
ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
instance.setState(state);
instance.setProcessDefinition(processInstance.getProcessDefinition());
processService.updateProcessInstance(instance);
processInstance = instance;
}
}
/**
* get task dependency result
* @param taskInstance task instance
* @return DependResult
*/
private DependResult getDependResultForTask(TaskInstance taskInstance){
return isTaskDepsComplete(taskInstance.getName());
}
/**
* add task to standby list
* @param taskInstance task instance
*/
private void addTaskToStandByList(TaskInstance taskInstance){
logger.info("add task to stand by list: {}", taskInstance.getName());
readyToSubmitTaskList.putIfAbsent(taskInstance.getName(), taskInstance);
}
/**
* remove task from stand by list
* @param taskInstance task instance
*/
private void removeTaskFromStandbyList(TaskInstance taskInstance){
logger.info("remove task from stand by list: {}", taskInstance.getName());
readyToSubmitTaskList.remove(taskInstance.getName());
}
/**
* has retry task in standby
* @return Boolean whether has retry task in standby
*/
private boolean hasRetryTaskInStandBy(){
for (Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) {
if(entry.getValue().getState().typeIsFailure()){
return true;
}
}
return false;
}
/**
     * submit and watch the tasks until the workflow stops
*/
private void runProcess(){
// submit start node
submitPostNode(null);
boolean sendTimeWarning = false;
while(!processInstance.isProcessInstanceStop()){
            // send a warning email if the process times out.
if(!sendTimeWarning && checkProcessTimeOut(processInstance) ){
alertManager.sendProcessTimeoutAlert(processInstance,
processService.findProcessDefineById(processInstance.getProcessDefinitionId()));
sendTimeWarning = true;
}
for(Map.Entry<MasterBaseTaskExecThread,Future<Boolean>> entry: activeTaskNode.entrySet()) {
Future<Boolean> future = entry.getValue();
TaskInstance task = entry.getKey().getTaskInstance();
if(!future.isDone()){
continue;
}
                // the task monitor thread finished; refresh the task instance from the db
task = this.processService.findTaskInstanceById(task.getId());
if(task == null){
this.taskFailedSubmit = true;
activeTaskNode.remove(entry.getKey());
continue;
}
                // remove finished tasks from the active set
if(task.getState().typeIsFinished()){
activeTaskNode.remove(entry.getKey());
}
logger.info("task :{}, id:{} complete, state is {} ",
task.getName(), task.getId(), task.getState());
                // node succeeded, submit its post nodes
if(task.getState() == ExecutionStatus.SUCCESS){
completeTaskList.put(task.getName(), task);
submitPostNode(task.getName());
continue;
}
                // node failed: retry first; if retries are exhausted, run the failure handling
if(task.getState().typeIsFailure()){
if(task.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){
this.recoverToleranceFaultTaskList.add(task);
}
if(task.taskCanRetry()){
addTaskToStandByList(task);
}else{
completeTaskList.put(task.getName(), task);
if( task.isConditionsTask()
|| DagHelper.haveConditionsAfterNode(task.getName(), dag)) {
submitPostNode(task.getName());
}else{
errorTaskList.put(task.getName(), task);
if(processInstance.getFailureStrategy() == FailureStrategy.END){
killTheOtherTasks();
}
}
}
continue;
}
// other status stop/pause
completeTaskList.put(task.getName(), task);
}
// send alert
if(CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)){
alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList);
this.recoverToleranceFaultTaskList.clear();
}
            // update the status of completed tasks
            // failure takes priority over pause:
            // if a task fails, other paused tasks must be reset to KILL
if(errorTaskList.size() > 0){
for(Map.Entry<String, TaskInstance> entry: completeTaskList.entrySet()) {
TaskInstance completeTask = entry.getValue();
if(completeTask.getState()== ExecutionStatus.PAUSE){
completeTask.setState(ExecutionStatus.KILL);
completeTaskList.put(entry.getKey(), completeTask);
processService.updateTaskInstance(completeTask);
}
}
}
if(canSubmitTaskToQueue()){
submitStandByTask();
}
try {
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
} catch (InterruptedException e) {
logger.error(e.getMessage(),e);
}
updateProcessInstanceState();
}
logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState());
}
/**
     * check whether the process instance has timed out
     * @param processInstance process instance
     * @return true if the running time of the process instance exceeds its timeout
*/
private boolean checkProcessTimeOut(ProcessInstance processInstance) {
if(processInstance.getTimeout() == 0 ){
return false;
}
Date now = new Date();
long runningTime = DateUtils.diffMin(now, processInstance.getStartTime());
return runningTime > processInstance.getTimeout();
}
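    // Worked example (hypothetical values): timeout = 60 (minutes) and a start
    // time 90 minutes ago give runningTime = 90 > 60, so the process is flagged
    // as timed out and runProcess() sends the timeout alert once.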
/**
     * whether tasks can be submitted to the queue, based on available cpu/memory resources
* @return boolean
*/
private boolean canSubmitTaskToQueue() {
return OSUtils.checkResource(masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory());
}
/**
     * close the ongoing tasks
*/
private void killTheOtherTasks() {
logger.info("kill called on process instance id: {}, num: {}", processInstance.getId(),
activeTaskNode.size());
for (Map.Entry<MasterBaseTaskExecThread, Future<Boolean>> entry : activeTaskNode.entrySet()) {
MasterBaseTaskExecThread taskExecThread = entry.getKey();
Future<Boolean> future = entry.getValue();
TaskInstance taskInstance = taskExecThread.getTaskInstance();
taskInstance = processService.findTaskInstanceById(taskInstance.getId());
if(taskInstance != null && taskInstance.getState().typeIsFinished()){
continue;
}
if (!future.isDone()) {
// record kill info
logger.info("kill process instance, id: {}, task: {}", processInstance.getId(), taskExecThread.getTaskInstance().getId());
// kill node
taskExecThread.kill();
}
}
}
/**
     * whether the retry interval has elapsed
* @param taskInstance task instance
* @return Boolean
*/
private boolean retryTaskIntervalOverTime(TaskInstance taskInstance){
if(taskInstance.getState() != ExecutionStatus.FAILURE){
return true;
}
if(taskInstance.getId() == 0 ||
taskInstance.getMaxRetryTimes() ==0 ||
taskInstance.getRetryInterval() == 0 ){
return true;
}
Date now = new Date();
long failedTimeInterval = DateUtils.differSec(now, taskInstance.getEndTime());
        // return true only after the retry interval (converted to seconds) has elapsed since the task ended
return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < failedTimeInterval;
}
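    // Worked example (hypothetical values, assuming SEC_2_MINUTES_TIME_UNIT is 60):
    // retryInterval = 2 (minutes) and a task that failed 90 seconds ago give
    // 2 * 60 = 120 > 90, so this returns false and the retry is deferred; once
    // more than 120 seconds have elapsed it returns true and the task may be
    // resubmitted by submitStandByTask().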
/**
     * handle the list of tasks waiting to be submitted
*/
private void submitStandByTask(){
for(Map.Entry<String, TaskInstance> entry: readyToSubmitTaskList.entrySet()) {
TaskInstance task = entry.getValue();
DependResult dependResult = getDependResultForTask(task);
if(DependResult.SUCCESS == dependResult){
if(retryTaskIntervalOverTime(task)){
submitTaskExec(task);
removeTaskFromStandbyList(task);
}
}else if(DependResult.FAILED == dependResult){
// if the dependency fails, the current node is not submitted and the state changes to failure.
dependFailedTask.put(entry.getKey(), task);
removeTaskFromStandbyList(task);
logger.info("task {},id:{} depend result : {}",task.getName(), task.getId(), dependResult);
}
}
}
/**
* get recovery task instance
* @param taskId task id
* @return recovery task instance
*/
private TaskInstance getRecoveryTaskInstance(String taskId){
        if(StringUtils.isEmpty(taskId)){
return null;
}
try {
Integer intId = Integer.valueOf(taskId);
TaskInstance task = processService.findTaskInstanceById(intId);
if(task == null){
logger.error("start node id cannot be found: {}", taskId);
}else {
return task;
}
}catch (Exception e){
logger.error("get recovery task instance failed ",e);
}
return null;
}
/**
* get start task instance list
* @param cmdParam command param
* @return task instance list
*/
private List<TaskInstance> getStartTaskInstanceList(String cmdParam){
List<TaskInstance> instanceList = new ArrayList<>();
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
if(paramMap != null && paramMap.containsKey(CMDPARAM_RECOVERY_START_NODE_STRING)){
String[] idList = paramMap.get(CMDPARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA);
for(String nodeId : idList){
TaskInstance task = getRecoveryTaskInstance(nodeId);
if(task != null){
instanceList.add(task);
}
}
}
return instanceList;
}
/**
* parse "StartNodeNameList" from cmd param
* @param cmdParam command param
* @return start node name list
*/
private List<String> parseStartNodeName(String cmdParam){
List<String> startNodeNameList = new ArrayList<>();
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
if(paramMap == null){
return startNodeNameList;
}
if(paramMap.containsKey(CMDPARAM_START_NODE_NAMES)){
startNodeNameList = Arrays.asList(paramMap.get(CMDPARAM_START_NODE_NAMES).split(Constants.COMMA));
}
return startNodeNameList;
}
/**
     * generate the recovery node name list from the recovered task instances;
     * returns an empty list when there are no recovery nodes
* @return recovery node name list
*/
private List<String> getRecoveryNodeNameList(){
List<String> recoveryNodeNameList = new ArrayList<>();
if(CollectionUtils.isNotEmpty(recoverNodeIdList)) {
for (TaskInstance task : recoverNodeIdList) {
recoveryNodeNameList.add(task.getName());
}
}
return recoveryNodeNameList;
}
/**
* generate flow dag
* @param processDefinitionJson process definition json
* @param startNodeNameList start node name list
* @param recoveryNodeNameList recovery node name list
* @param depNodeType depend node type
* @return ProcessDag process dag
* @throws Exception exception
*/
public ProcessDag generateFlowDag(String processDefinitionJson,
List<String> startNodeNameList,
List<String> recoveryNodeNameList,
TaskDependType depNodeType)throws Exception{
return DagHelper.generateFlowDag(processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,295 | When I create a scheduler directly through the API, a NullPointerException will appear when the processInstancePriority field is not passed. It is @RequestParam(value = "processInstancePriority", required = false) |
![image](https://user-images.githubusercontent.com/34977303/88355113-e1c7a480-cd95-11ea-8cfa-9d34d866cb08.png)
error line
![image](https://user-images.githubusercontent.com/34977303/88355156-10457f80-cd96-11ea-8e3f-31170d6fdff6.png)
Do you need to modify it to @RequestParam(value = "processInstancePriority", required = true)? | https://github.com/apache/dolphinscheduler/issues/3295 | https://github.com/apache/dolphinscheduler/pull/3341 | 5f725aa28b2edbd246480a554ac7822bf07fbbfd | e2a117032de53a206fd0d8a55b665a199efb276a | "2020-07-24T02:12:26Z" | java | "2020-07-29T11:01:53Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/SchedulerController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import io.swagger.annotations.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.*;
import springfox.documentation.annotations.ApiIgnore;
import java.io.IOException;
import java.util.Map;
import static org.apache.dolphinscheduler.api.enums.Status.*;
import static org.apache.dolphinscheduler.common.Constants.SESSION_USER;
/**
* schedule controller
*/
@Api(tags = "SCHEDULER_TAG", position = 13)
@RestController
@RequestMapping("/projects/{projectName}/schedule")
public class SchedulerController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(SchedulerController.class);
public static final String DEFAULT_WARNING_TYPE = "NONE";
public static final String DEFAULT_NOTIFY_GROUP_ID = "1";
public static final String DEFAULT_FAILURE_POLICY = "CONTINUE";
@Autowired
private SchedulerService schedulerService;
/**
* create schedule
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param processInstancePriority process instance priority
* @param receivers receivers
* @param receiversCc receivers cc
* @param workerGroup worker group
* @return create result code
*/
@ApiOperation(value = "createSchedule", notes = "CREATE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"),
@ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"),
})
@PostMapping("/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_SCHEDULE_ERROR)
public Result createSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") Integer processDefinitionId,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false, defaultValue = DEFAULT_NOTIFY_GROUP_ID) int warningGroupId,
@RequestParam(value = "failureStrategy", required = false, defaultValue = DEFAULT_FAILURE_POLICY) FailureStrategy failureStrategy,
@RequestParam(value = "receivers", required = false) String receivers,
@RequestParam(value = "receiversCc", required = false) String receiversCc,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException {
logger.info("login user {}, project name: {}, process name: {}, create schedule: {}, warning type: {}, warning group id: {}," +
"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {}, workGroupId:{}",
loginUser.getUserName(), projectName, processDefinitionId, schedule, warningType, warningGroupId,
failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup);
Map<String, Object> result = schedulerService.insertSchedule(loginUser, projectName, processDefinitionId, schedule,
warningType, warningGroupId, failureStrategy, receivers, receiversCc, processInstancePriority, workerGroup);
return returnDataList(result);
}
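    // Descriptive note: processInstancePriority above is optional
    // (required = false) with no defaultValue, so omitting the parameter yields
    // a null Priority here. A minimal defensive sketch (hypothetical, not the
    // project's actual fix) would default it before use, mirroring how
    // warningType and failureStrategy are defaulted:
    //
    //     Priority priority = processInstancePriority == null
    //             ? Priority.MEDIUM : processInstancePriority;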
/**
     * update schedule
*
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param receivers receivers
* @param workerGroup worker group
* @param processInstancePriority process instance priority
* @param receiversCc receivers cc
* @return update result code
*/
@ApiOperation(value = "updateSchedule", notes = "UPDATE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", type = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", type = "FailureStrategy"),
@ApiImplicitParam(name = "receivers", value = "RECEIVERS", type = "String"),
@ApiImplicitParam(name = "receiversCc", value = "RECEIVERS_CC", type = "String"),
@ApiImplicitParam(name = "workerGroupId", value = "WORKER_GROUP_ID", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", type = "Priority"),
})
@PostMapping("/update")
@ApiException(UPDATE_SCHEDULE_ERROR)
public Result updateSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "id") Integer id,
@RequestParam(value = "schedule") String schedule,
@RequestParam(value = "warningType", required = false, defaultValue = DEFAULT_WARNING_TYPE) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) int warningGroupId,
@RequestParam(value = "failureStrategy", required = false, defaultValue = "END") FailureStrategy failureStrategy,
@RequestParam(value = "receivers", required = false) String receivers,
@RequestParam(value = "receiversCc", required = false) String receiversCc,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority) throws IOException {
logger.info("login user {}, project name: {},id: {}, updateProcessInstance schedule: {}, notify type: {}, notify mails: {}, " +
"failure policy: {},receivers : {},receiversCc : {},processInstancePriority : {},workerGroupId:{}",
loginUser.getUserName(), projectName, id, schedule, warningType, warningGroupId, failureStrategy,
receivers, receiversCc, processInstancePriority, workerGroup);
Map<String, Object> result = schedulerService.updateSchedule(loginUser, projectName, id, schedule,
warningType, warningGroupId, failureStrategy, receivers, receiversCc, null, processInstancePriority, workerGroup);
return returnDataList(result);
}
/**
     * publish the schedule (set schedule state to online)
*
* @param loginUser login user
* @param projectName project name
* @param id scheduler id
* @return publish result code
*/
@ApiOperation(value = "online", notes = "ONLINE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping("/online")
@ApiException(PUBLISH_SCHEDULE_ONLINE_ERROR)
public Result online(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName,
@RequestParam("id") Integer id) {
logger.info("login user {}, schedule setScheduleState, project name: {}, id: {}",
loginUser.getUserName(), projectName, id);
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.ONLINE);
return returnDataList(result);
}
/**
* offline schedule
*
* @param loginUser login user
* @param projectName project name
* @param id schedule id
* @return operation result code
*/
@ApiOperation(value = "offline", notes = "OFFLINE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping("/offline")
@ApiException(OFFLINE_SCHEDULE_ERROR)
public Result offline(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable("projectName") String projectName,
@RequestParam("id") Integer id) {
logger.info("login user {}, schedule offline, project name: {}, process definition id: {}",
loginUser.getUserName(), projectName, id);
Map<String, Object> result = schedulerService.setScheduleState(loginUser, projectName, id, ReleaseState.OFFLINE);
return returnDataList(result);
}
/**
* query schedule list paging
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param pageNo page number
* @param pageSize page size
* @param searchVal search value
* @return schedule list page
*/
@ApiOperation(value = "queryScheduleListPaging", notes = "QUERY_SCHEDULE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", type = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "100")
})
@GetMapping("/list-paging")
@ApiException(QUERY_SCHEDULE_LIST_PAGING_ERROR)
public Result queryScheduleListPaging(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam Integer processDefinitionId,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageNo") Integer pageNo,
@RequestParam("pageSize") Integer pageSize) {
logger.info("login user {}, query schedule, project name: {}, process definition id: {}",
loginUser.getUserName(), projectName, processDefinitionId);
searchVal = ParameterUtils.handleEscapes(searchVal);
Map<String, Object> result = schedulerService.querySchedule(loginUser, projectName, processDefinitionId, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* delete schedule by id
*
* @param loginUser login user
* @param projectName project name
     * @param scheduleId schedule id
* @return delete result code
*/
@ApiOperation(value = "deleteScheduleById", notes = "OFFLINE_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "scheduleId", value = "SCHEDULE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_SCHEDULE_CRON_BY_ID_ERROR)
public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser,
@PathVariable String projectName,
@RequestParam("scheduleId") Integer scheduleId
) {
logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}",
loginUser.getUserName(), projectName, scheduleId);
Map<String, Object> result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId);
return returnDataList(result);
}
/**
* query schedule list
*
* @param loginUser login user
* @param projectName project name
* @return schedule list
*/
@ApiOperation(value = "queryScheduleList", notes = "QUERY_SCHEDULE_LIST_NOTES")
@PostMapping("/list")
@ApiException(QUERY_SCHEDULE_LIST_ERROR)
public Result queryScheduleList(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName) {
logger.info("login user {}, query schedule list, project name: {}",
loginUser.getUserName(), projectName);
Map<String, Object> result = schedulerService.queryScheduleList(loginUser, projectName);
return returnDataList(result);
}
/**
* preview schedule
*
* @param loginUser login user
* @param projectName project name
* @param schedule schedule expression
* @return the next five fire time
*/
@ApiOperation(value = "previewSchedule", notes = "PREVIEW_SCHEDULE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "schedule", value = "SCHEDULE", dataType = "String", example = "{'startTime':'2019-06-10 00:00:00','endTime':'2019-06-13 00:00:00','crontab':'0 0 3/6 * * ? *'}"),
})
@PostMapping("/preview")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(PREVIEW_SCHEDULE_ERROR)
public Result previewSchedule(@ApiIgnore @RequestAttribute(value = SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "schedule") String schedule
) {
logger.info("login user {}, project name: {}, preview schedule: {}",
loginUser.getUserName(), projectName, schedule);
Map<String, Object> result = schedulerService.previewSchedule(loginUser, projectName, schedule);
return returnDataList(result);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,338 | [Bug][ui] No warning when the user does not fill in the node blanks | **Describe the question**
When I try to confirm and save an unconfigured node, no warning is shown.
**What are the current deficiencies and the benefits of improvement**
- It helps the user fill in the required blanks.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Add the warning.
| https://github.com/apache/dolphinscheduler/issues/3338 | https://github.com/apache/dolphinscheduler/pull/3349 | 78856fb08e512b62ab7b24c809407616b2b4bbc5 | a532d8ed69e41c69c4979eae0c1b964891abc696 | "2020-07-28T14:44:03Z" | java | "2020-07-30T05:14:05Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="form-model-model" v-clickoutside="_handleClose">
<div class="title-box">
<span class="name">{{$t('Current node settings')}}</span>
<span class="go-subtask">
        <!-- This component cannot pop up a dialog itself, so log handling is done via slots -->
<m-log :item="backfillItem">
<template slot="history"><a href="javascript:" @click="_seeHistory" ><em class="ansicon ans-icon-timer"></em><em>{{$t('View history')}}</em></a></template>
<template slot="log"><a href="javascript:"><em class="ansicon ans-icon-log"></em><em>{{$t('View log')}}</em></a></template>
</m-log>
<a href="javascript:" @click="_goSubProcess" v-if="_isGoSubProcess"><em class="ansicon ans-icon-node"></em><em>{{$t('Enter this child node')}}</em></a>
</span>
</div>
<div class="content-box" v-if="isContentBox">
<div class="from-model">
<!-- Node name -->
<div class="clearfix list">
<div class="text-box"><span>{{$t('Node name')}}</span></div>
<div class="cont-box">
<label class="label-box">
<x-input
type="text"
v-model="name"
:disabled="isDetails"
:placeholder="$t('Please enter name(required)')"
maxlength="100"
@on-blur="_verifName()"
autocomplete="off">
</x-input>
</label>
</div>
</div>
<!-- Running sign -->
<div class="clearfix list">
<div class="text-box"><span>{{$t('Run flag')}}</span></div>
<div class="cont-box">
<label class="label-box">
<x-radio-group v-model="runFlag" >
<x-radio :label="'NORMAL'" :disabled="isDetails">{{$t('Normal')}}</x-radio>
<x-radio :label="'FORBIDDEN'" :disabled="isDetails">{{$t('Prohibition execution')}}</x-radio>
</x-radio-group>
</label>
</div>
</div>
<!-- description -->
<div class="clearfix list">
<div class="text-box">
<span>{{$t('Description')}}</span>
</div>
<div class="cont-box">
<label class="label-box">
<x-input
resize
:autosize="{minRows:2}"
type="textarea"
:disabled="isDetails"
v-model="description"
:placeholder="$t('Please enter description')"
autocomplete="off">
</x-input>
</label>
</div>
</div>
<!-- Task priority -->
<div class="clearfix list">
<div class="text-box">
<span>{{$t('Task priority')}}</span>
</div>
<div class="cont-box">
<span class="label-box" style="width: 193px;display: inline-block;">
<m-priority v-model="taskInstancePriority"></m-priority>
</span>
<span class="text-b">{{$t('Worker group')}}</span>
<m-worker-groups v-model="workerGroup"></m-worker-groups>
</div>
</div>
<!-- Number of failed retries -->
<div class="clearfix list" v-if="taskType !== 'SUB_PROCESS'">
<div class="text-box">
<span>{{$t('Number of failed retries')}}</span>
</div>
<div class="cont-box">
<m-select-input v-model="maxRetryTimes" :list="[0,1,2,3,4]">
</m-select-input>
<span>({{$t('Times')}})</span>
<span class="text-b">{{$t('Failed retry interval')}}</span>
<m-select-input v-model="retryInterval" :list="[1,10,30,60,120]">
</m-select-input>
<span>({{$t('Minute')}})</span>
</div>
</div>
<div class="clearfix list" v-if="taskType === 'CONDITIONS'">
<div class="text-box">
<span>{{$t('State')}}</span>
</div>
<div class="cont-box">
<span class="label-box" style="width: 193px;display: inline-block;">
<x-select style="width: 157px;" v-model="successNode" :disabled="true">
<x-option v-for="item in stateList" :key="item.value" :value="item.value" :label="item.label">
</x-option>
</x-select>
</span>
<span class="text-b" style="padding-left: 38px">{{$t('Branch flow')}}</span>
<x-select style="width: 157px;" v-model="successBranch" clearable>
<x-option v-for="item in rearList" :key="item.value" :value="item.value" :label="item.label">
</x-option>
</x-select>
</div>
</div>
<div class="clearfix list" v-if="taskType === 'CONDITIONS'">
<div class="text-box">
<span>{{$t('State')}}</span>
</div>
<div class="cont-box">
<span class="label-box" style="width: 193px;display: inline-block;">
<x-select style="width: 157px;" v-model="failedNode" :disabled="true">
<x-option v-for="item in stateList" :key="item.value" :value="item.value" :label="item.label">
</x-option>
</x-select>
</span>
<span class="text-b" style="padding-left: 38px">{{$t('Branch flow')}}</span>
<x-select style="width: 157px;" v-model="failedBranch" clearable>
<x-option v-for="item in rearList" :key="item.value" :value="item.value" :label="item.label">
</x-option>
</x-select>
</div>
</div>
<!-- Task timeout alarm -->
<m-timeout-alarm
ref="timeout"
:backfill-item="backfillItem"
@on-timeout="_onTimeout">
</m-timeout-alarm>
<!-- shell node -->
<m-shell
v-if="taskType === 'SHELL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SHELL"
:backfill-item="backfillItem">
</m-shell>
<!-- sub_process node -->
<m-sub-process
v-if="taskType === 'SUB_PROCESS'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
@on-set-process-name="_onSetProcessName"
ref="SUB_PROCESS"
:backfill-item="backfillItem">
</m-sub-process>
<!-- procedure node -->
<m-procedure
v-if="taskType === 'PROCEDURE'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="PROCEDURE"
:backfill-item="backfillItem">
</m-procedure>
<!-- sql node -->
<m-sql
v-if="taskType === 'SQL'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SQL"
:create-node-id="id"
:backfill-item="backfillItem">
</m-sql>
<!-- spark node -->
<m-spark
v-if="taskType === 'SPARK'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SPARK"
:backfill-item="backfillItem">
</m-spark>
<m-flink
v-if="taskType === 'FLINK'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="FLINK"
:backfill-item="backfillItem">
</m-flink>
<!-- mr node -->
<m-mr
v-if="taskType === 'MR'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="MR"
:backfill-item="backfillItem">
</m-mr>
<!-- python node -->
<m-python
v-if="taskType === 'PYTHON'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="PYTHON"
:backfill-item="backfillItem">
</m-python>
<!-- dependent node -->
<m-dependent
v-if="taskType === 'DEPENDENT'"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
ref="DEPENDENT"
:backfill-item="backfillItem">
</m-dependent>
<m-http
v-if="taskType === 'HTTP'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="HTTP"
:backfill-item="backfillItem">
</m-http>
<m-datax
v-if="taskType === 'DATAX'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="DATAX"
:backfill-item="backfillItem">
</m-datax>
<m-sqoop
v-if="taskType === 'SQOOP'"
@on-params="_onParams"
@on-cache-params="_onCacheParams"
ref="SQOOP"
:backfill-item="backfillItem">
</m-sqoop>
<m-conditions
v-if="taskType === 'CONDITIONS'"
ref="CONDITIONS"
@on-dependent="_onDependent"
@on-cache-dependent="_onCacheDependent"
:backfill-item="backfillItem"
:pre-node="preNode">
</m-conditions>
</div>
</div>
<div class="bottom-box">
<div class="submit" style="background: #fff;">
<x-button type="text" id="cancelBtn"> {{$t('Cancel')}} </x-button>
<x-button type="primary" shape="circle" :loading="spinnerLoading" @click="ok()" :disabled="isDetails">{{spinnerLoading ? 'Loading...' : $t('Confirm add')}} </x-button>
</div>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import mLog from './log'
import mMr from './tasks/mr'
import mSql from './tasks/sql'
import i18n from '@/module/i18n'
import mShell from './tasks/shell'
import mSpark from './tasks/spark'
import mFlink from './tasks/flink'
import mPython from './tasks/python'
import JSP from './../plugIn/jsPlumbHandle'
import mProcedure from './tasks/procedure'
import mDependent from './tasks/dependent'
import mHttp from './tasks/http'
import mDatax from './tasks/datax'
import mConditions from './tasks/conditions'
import mSqoop from './tasks/sqoop'
import mSubProcess from './tasks/sub_process'
import mSelectInput from './_source/selectInput'
import mTimeoutAlarm from './_source/timeoutAlarm'
import mWorkerGroups from './_source/workerGroups'
import clickoutside from '@/module/util/clickoutside'
import disabledState from '@/module/mixin/disabledState'
import { isNameExDag, rtBantpl } from './../plugIn/util'
import mPriority from '@/module/components/priority/priority'
export default {
name: 'form-model',
data () {
return {
// loading
spinnerLoading: false,
// node name
name: '',
// description
description: '',
// Node echo data
backfillItem: {},
cacheBackfillItem: {},
// Resource(list)
resourcesList: [],
successNode: 'success',
failedNode: 'failed',
successBranch: '',
failedBranch: '',
conditionResult: {
'successNode': [],
'failedNode': []
},
// dependence
dependence: {},
// cache dependence
cacheDependence: {},
// Current node params data
params: {},
// Running sign
runFlag: 'NORMAL',
      // Controls rendering of the content box; rendering is deferred until backfill data is ready to avoid a double echo of node data
isContentBox: false,
// Number of failed retries
maxRetryTimes: '0',
// Failure retry interval
retryInterval: '1',
// Task timeout alarm
timeout: {},
// Task priority
taskInstancePriority: 'MEDIUM',
// worker group id
workerGroup: 'default',
stateList:[
{
value: 'success',
label: `${i18n.$t('success')}`
},
{
value: 'failed',
label: `${i18n.$t('failed')}`
}
]
}
},
/**
   * Handle click events that originate outside the component (clickoutside directive)
*/
directives: { clickoutside },
mixins: [disabledState],
props: {
id: Number,
taskType: String,
self: Object,
preNode: Array,
rearList: Array,
instanceId: Number
},
methods: {
...mapActions('dag', ['getTaskInstanceList']),
/**
* depend
*/
_onDependent (o) {
this.dependence = Object.assign(this.dependence, {}, o)
},
/**
* cache dependent
*/
_onCacheDependent (o) {
this.cacheDependence = Object.assign(this.cacheDependence, {}, o)
},
/**
* Task timeout alarm
*/
_onTimeout (o) {
this.timeout = Object.assign(this.timeout, {}, o)
},
/**
* Click external to close the current component
*/
_handleClose () {
// this.close()
},
/**
* Jump to task instance
*/
_seeHistory () {
this.self.$router.push({
name: 'task-instance',
query: {
processInstanceId: this.self.$route.params.id,
taskName: this.backfillItem.name
}
})
this.$modal.destroy()
},
/**
     * Enter the child node; routes to the sub-process instance when on the
     * instance details page, otherwise to the sub-process definition
*/
_goSubProcess () {
if (_.isEmpty(this.backfillItem)) {
this.$message.warning(`${i18n.$t('The newly created sub-Process has not yet been executed and cannot enter the sub-Process')}`)
return
}
if (this.router.history.current.name === 'projects-instance-details') {
let stateId = $(`#${this.id}`).attr('data-state-id') || null
if (!stateId) {
this.$message.warning(`${i18n.$t('The task has not been executed and cannot enter the sub-Process')}`)
return
}
this.store.dispatch('dag/getSubProcessId', { taskId: stateId }).then(res => {
this.$emit('onSubProcess', {
subProcessId: res.data.subProcessInstanceId,
fromThis: this
})
}).catch(e => {
this.$message.error(e.msg || '')
})
} else {
this.$emit('onSubProcess', {
subProcessId: this.backfillItem.params.processDefinitionId,
fromThis: this
})
}
},
/**
* return params
*/
_onParams (o) {
this.params = Object.assign({}, o)
},
_onCacheParams (o) {
this.params = Object.assign(this.params, {}, o)
this._cacheItem()
},
_cacheItem () {
this.conditionResult.successNode[0] = this.successBranch
this.conditionResult.failedNode[0] = this.failedBranch
this.$emit('cacheTaskInfo', {
item: {
type: this.taskType,
id: this.id,
name: this.name,
params: this.params,
description: this.description,
runFlag: this.runFlag,
conditionResult: this.conditionResult,
dependence: this.cacheDependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
status: this.status,
branch: this.branch
},
fromThis: this
})
},
/**
* verification name
*/
_verifName () {
if (!_.trim(this.name)) {
this.$message.warning(`${i18n.$t('Please enter name (required)')}`)
return false
}
if (this.successBranch !='' && this.successBranch !=null && this.successBranch == this.failedBranch) {
this.$message.warning(`${i18n.$t('Cannot select the same node for successful branch flow and failed branch flow')}`)
return false
}
if (this.name === this.backfillItem.name) {
return true
}
      // duplicate-name check: read from the DOM for new nodes, from the backfill store otherwise
if (isNameExDag(this.name, _.isEmpty(this.backfillItem) ? 'dom' : 'backfill')) {
this.$message.warning(`${i18n.$t('Name already exists')}`)
return false
}
return true
},
_verifWorkGroup() {
let item = this.store.state.security.workerGroupsListAll.find(item => {
return item.id == this.workerGroup;
});
if(item==undefined) {
this.$message.warning(`${i18n.$t('The Worker group no longer exists, please select the correct Worker group!')}`)
return false;
}
return true
},
/**
* Global verification procedure
*/
_verification () {
// Verify name
if (!this._verifName()) {
return
}
// verif workGroup
if(!this._verifWorkGroup()) {
return
}
// Verify task alarm parameters
if (!this.$refs['timeout']._verification()) {
return
}
// Verify node parameters
if (!this.$refs[this.taskType]._verification()) {
return
}
$(`#${this.id}`).find('span').text(this.name)
this.conditionResult.successNode[0] = this.successBranch
this.conditionResult.failedNode[0] = this.failedBranch
// Store the corresponding node data structure
this.$emit('addTaskInfo', {
item: {
type: this.taskType,
id: this.id,
name: this.name,
params: this.params,
description: this.description,
runFlag: this.runFlag,
conditionResult: this.conditionResult,
dependence: this.dependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
status: this.status,
branch: this.branch
},
fromThis: this
})
// set run flag
this._setRunFlag()
},
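    /**
     * Descriptive sketch (hypothetical, not the component's actual code): when a
     * task type's `_verification()` returns false without surfacing its own
     * message, a generic fallback warning could be raised in `_verification`
     * above; the i18n key here is an assumption:
     *
     *   if (!this.$refs[this.taskType]._verification()) {
     *     this.$message.warning(`${i18n.$t('Please complete the required node settings')}`)
     *     return
     *   }
     */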
/**
     * Echo the name of the node selected in the sub-workflow
*/
_onSetProcessName (name) {
this.name = name
},
/**
* set run flag
*/
_setRunFlag () {
let dom = $(`#${this.id}`).find('.ban-p')
dom.html('')
if (this.runFlag === 'FORBIDDEN') {
dom.append(rtBantpl())
}
},
/**
* Submit verification
*/
ok () {
this._verification()
},
/**
* Close and destroy component and component internal events
*/
close () {
let flag = false
      // a node that was never saved will be deleted on close
if (!this.backfillItem.name) {
flag = true
}
this.isContentBox = false
      // flag: whether the node should be deleted
this.$emit('close', {
item: this.cacheBackfillItem,
flag: flag,
fromThis: this
})
}
},
watch: {
/**
     * Watch for item changes and cache the new values
**/
_item (val) {
// this._cacheItem()
}
},
created () {
// Unbind copy and paste events
JSP.removePaste()
// Backfill data
let taskList = this.store.state.dag.tasks
    // backfill from cacheTasks first
let cacheTasks = this.store.state.dag.cacheTasks
let o = {}
if (cacheTasks[this.id]) {
o = cacheTasks[this.id]
this.backfillItem = cacheTasks[this.id]
} else {
if (taskList.length) {
taskList.forEach(v => {
if (v.id === this.id) {
o = v
this.backfillItem = v
}
})
}
}
// Non-null objects represent backfill
if (!_.isEmpty(o)) {
this.name = o.name
this.taskInstancePriority = o.taskInstancePriority
this.runFlag = o.runFlag || 'NORMAL'
this.description = o.description
this.maxRetryTimes = o.maxRetryTimes
this.retryInterval = o.retryInterval
if(o.conditionResult) {
this.successBranch = o.conditionResult.successNode[0]
this.failedBranch = o.conditionResult.failedNode[0]
}
// If the workergroup has been deleted, set the default workergroup
var hasMatch = false;
for (let i = 0; i < this.store.state.security.workerGroupsListAll.length; i++) {
var workerGroup = this.store.state.security.workerGroupsListAll[i].id
if (o.workerGroup == workerGroup) {
hasMatch = true;
break;
}
}
if(o.workerGroup == undefined) {
this.store.dispatch('dag/getTaskInstanceList',{
pageSize: 10, pageNo: 1, processInstanceId: this.instanceId, name: o.name
}).then(res => {
this.workerGroup = res.totalList[0].workerGroup
})
} else {
this.workerGroup = o.workerGroup
}
this.params = o.params || {}
this.dependence = o.dependence || {}
this.cacheDependence = o.dependence || {}
} else {
this.workerGroup = this.store.state.security.workerGroupsListAll[0].id
}
this.cacheBackfillItem = JSON.parse(JSON.stringify(o))
this.isContentBox = true
},
mounted () {
let self = this
$("#cancelBtn").mousedown(function(event){
event.preventDefault();
self.close()
});
},
updated () {
},
beforeDestroy () {
},
destroyed () {
},
computed: {
/**
* Child workflow entry show/hide
*/
_isGoSubProcess () {
return this.taskType === 'SUB_PROCESS' && this.name
},
//Define the item model
_item () {
return {
type: this.taskType,
id: this.id,
name: this.name,
description: this.description,
runFlag: this.runFlag,
dependence: this.cacheDependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
successBranch: this.successBranch,
failedBranch: this.failedBranch
}
}
},
components: {
mMr,
mShell,
mSubProcess,
mProcedure,
mSql,
mLog,
mSpark,
mFlink,
mPython,
mDependent,
mHttp,
mDatax,
mSqoop,
mConditions,
mSelectInput,
mTimeoutAlarm,
mPriority,
mWorkerGroups
}
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./formModel";
.ans-radio-disabled {
.ans-radio-inner:after {
background-color: #6F8391
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,338 | [Bug][ui] No warning when the user does not fill in the node blanks | **Describe the question**
When I try to confirm and save an unconfigured node, no warning is shown.
**What are the current deficiencies and the benefits of improvement**
- It helps the user fill in the required blanks.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Add the warning.
| https://github.com/apache/dolphinscheduler/issues/3338 | https://github.com/apache/dolphinscheduler/pull/3349 | 78856fb08e512b62ab7b24c809407616b2b4bbc5 | a532d8ed69e41c69c4979eae0c1b964891abc696 | "2020-07-28T14:44:03Z" | java | "2020-07-30T05:14:05Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="udp-model">
<div class="scrollbar contpi-boxt">
<div class="title">
<span>{{$t('Set the DAG diagram name')}}</span>
</div>
<div>
<x-input
type="text"
v-model="name"
:disabled="router.history.current.name === 'projects-instance-details'"
:placeholder="$t('Please enter name(required)')">
</x-input>
</div>
<template v-if="router.history.current.name !== 'projects-instance-details'">
<div style="padding-top: 12px;">
<x-input
type="textarea"
v-model="description"
:autosize="{minRows:2}"
:placeholder="$t('Please enter description(optional)')"
autocomplete="off">
</x-input>
</div>
</template>
<div class="title" style="padding-top: 6px;">
<span class="text-b">{{$t('select tenant')}}</span>
<form-tenant v-model="tenantId"></form-tenant>
</div>
<div class="title" style="padding-top: 6px;">
<span class="text-b">{{$t('warning of timeout')}}</span>
<span style="padding-left: 6px;">
<x-switch v-model="checkedTimeout"></x-switch>
</span>
</div>
<div class="content" style="padding-bottom: 10px;" v-if="checkedTimeout">
<span>
<x-input v-model="timeout" style="width: 160px;" maxlength="9">
<span slot="append">{{$t('Minute')}}</span>
</x-input>
</span>
</div>
<div class="title" style="padding-top: 6px;">
<span>{{$t('Set global')}}</span>
</div>
<div class="content">
<div>
<m-local-params
ref="refLocalParams"
@on-local-params="_onLocalParams"
:udp-list="udpList"
:hide="false">
</m-local-params>
</div>
</div>
</div>
<div class="bottom">
<div class="submit">
<template v-if="router.history.current.name === 'projects-instance-details'">
<div class="lint-pt">
<x-checkbox v-model="syncDefine">{{$t('Whether to update the process definition')}}</x-checkbox>
</div>
</template>
<x-button type="text" @click="close()"> {{$t('Cancel')}} </x-button>
<x-button type="primary" shape="circle" @click="ok()">{{$t('Add')}}</x-button>
</div>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import mLocalParams from '../formModel/tasks/_source/localParams'
import disabledState from '@/module/mixin/disabledState'
import Affirm from '../jumpAffirm'
import FormTenant from "./_source/selectTenant";
export default {
name: 'udp',
data () {
return {
// dag name
name: '',
// dag description
description: '',
// Global custom parameters
udpList: [],
// Global custom parameters
udpListCache: [],
// Whether to update the process definition
syncDefine: true,
// Timeout alarm
timeout: 0,
tenantId: -1,
// checked Timeout alarm
checkedTimeout: true
}
},
mixins: [disabledState],
props: {
},
methods: {
/**
* udp data
*/
_onLocalParams (a) {
this.udpList = a
},
_verifTimeout () {
const reg = /^[1-9]\d*$/
if (!reg.test(this.timeout)) {
this.$message.warning(`${i18n.$t('Please enter a positive integer greater than 0')}`)
return false
}
return true
},
_accuStore(){
this.store.commit('dag/setGlobalParams', _.cloneDeep(this.udpList))
this.store.commit('dag/setName', _.cloneDeep(this.name))
this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout))
this.store.commit('dag/setTenantId', _.cloneDeep(this.tenantId))
this.store.commit('dag/setDesc', _.cloneDeep(this.description))
this.store.commit('dag/setSyncDefine', this.syncDefine)
},
/**
* submit
*/
ok () {
if (!this.name) {
this.$message.warning(`${i18n.$t('DAG graph name cannot be empty')}`)
return
}
let _verif = () => {
// verification udf
if (!this.$refs.refLocalParams._verifProp()) {
return
}
// verification timeout
if (this.checkedTimeout && !this._verifTimeout()) {
return
}
// Storage global globalParams
this._accuStore()
Affirm.setIsPop(false)
this.$emit('onUdp')
}
// Edit => direct storage
if (this.store.state.dag.name) {
_verif()
} else {
        // new workflow: first verify that the name is not already taken
this.store.dispatch('dag/verifDAGName', this.name).then(res => {
_verif()
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
/**
* Close the popup
*/
close () {
this.$emit('close')
}
},
watch: {
checkedTimeout (val) {
if (!val) {
this.timeout = 0
this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout))
}
}
},
created () {
const dag = _.cloneDeep(this.store.state.dag)
this.udpList = dag.globalParams
this.udpListCache = dag.globalParams
this.name = dag.name
this.description = dag.description
this.syncDefine = dag.syncDefine
this.timeout = dag.timeout || 0
this.checkedTimeout = this.timeout !== 0
this.$nextTick(() => {
if (dag.tenantId === -1) {
this.tenantId = this.store.state.user.userInfo.tenantId
} else {
this.tenantId = dag.tenantId
}
})
},
mounted () {},
components: {FormTenant, mLocalParams }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.udp-model {
width: 624px;
min-height: 420px;
background: #fff;
border-radius: 3px;
padding:20px 0 ;
position: relative;
.contpi-boxt {
max-height: 600px;
overflow-y: scroll;
padding:0 20px;
}
.title {
line-height: 36px;
padding-bottom: 10px;
span {
font-size: 16px;
color: #333;
}
}
.bottom{
position: absolute;
bottom: 0;
left: 0;
width: 100%;
text-align: right;
height: 56px;
line-height: 56px;
border-top: 1px solid #DCDEDC;
background: #fff;
.submit {
padding-right: 20px;
margin-top: -4px;
}
.lint-pt {
position: absolute;
left: 20px;
top: -2px;
>label {
font-weight: normal;
}
}
}
.content {
padding-bottom: 50px;
.user-def-params-model {
.add {
a {
color: #0097e0;
}
}
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,338 | [Bug][ui] No warning when the user does not fill in the node blanks | **Describe the question**
When I try to confirm and save an unconfigured node, no warning is shown.
**What are the current deficiencies and the benefits of improvement**
- It can help users fill in the required fields.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Add the warning.
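For reference, here is a minimal sketch of what such a warning could look like, reusing the `$message.warning` + i18n pattern already present in the UI code (the `_verifRequired` helper name is hypothetical, not the actual merged fix):
```js
// Hypothetical validation helper for a node form component (sketch only)
_verifRequired () {
  // warn and abort the save while a required field is still empty
  if (!this.name) {
    this.$message.warning(`${i18n.$t('Please enter name(required)')}`)
    return false
  }
  return true
}
```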
| https://github.com/apache/dolphinscheduler/issues/3338 | https://github.com/apache/dolphinscheduler/pull/3349 | 78856fb08e512b62ab7b24c809407616b2b4bbc5 | a532d8ed69e41c69c4979eae0c1b964891abc696 | "2020-07-28T14:44:03Z" | java | "2020-07-30T05:14:05Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Please enter name(required)': 'Please enter name(required)',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Please enter name (required)': 'Please enter name (required)',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Project Home',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
success: 'success',
failed: 'failed',
delete: 'delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main class': 'Main class',
'Main jar package': 'Main jar package',
'Please enter main jar package': 'Please enter main jar package',
'Command-line parameters': 'Command-line parameters',
'Please enter Command-line parameters': 'Please enter Command-line parameters',
'Other parameters': 'Other parameters',
'Please enter other parameters': 'Please enter other parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter method(optional)': 'Please enter method(optional)',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Flink Version':'Flink Version',
'Driver core number': 'Driver core number',
'Please enter driver core number': 'Please enter driver core number',
'Driver memory use': 'Driver memory use',
'Please enter driver memory use': 'Please enter driver memory use',
'Number of Executors': 'Number of Executors',
'Please enter the number of Executor': 'Please enter the number of Executor',
'Executor memory': 'Executor memory',
'Please enter the Executor memory': 'Please enter the Executor memory',
'Executor core number': 'Executor core number',
'Please enter Executor core number': 'Please enter Executor core number',
'The number of Executors should be a positive integer': 'The number of Executors should be a positive integer',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Please enter ExecutorPlease enter Executor core number': 'Please enter Executor core number',
'Core number should be positive integer': 'Core number should be positive integer',
'Please enter jobManager memory': 'Please enter jobManager memory',
'Please enter the taskManager memory': 'Please enter the taskManager memory',
'Please enter solt number': 'Please enter slot number',
'Please enter taskManager number': 'Please enter taskManager number',
'SQL Type': 'SQL Type',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Recipient required': 'Recipient required',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'Tenant Code': 'Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Queue',
'Please select a queue': 'default is tenant association queue',
'Please enter the tenant code in English': 'Please enter the tenant code in English',
'Please enter tenant code in English': 'Please enter tenant code in English',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'User Type':'User Type',
'Please enter phone number': 'Please enter phone number',
'Please enter main class': 'Please enter main class',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'Please select UDF resources directory': 'Please select UDF resources directory',
'UDF resources directory': 'UDF resources directory',
'Upload File Size': 'Upload File size cannot exceed 1g',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Group Name': 'Group Name',
'Please enter group name': 'Please enter group name',
'Group Type': 'Group Type',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
Date: 'Date',
waiting: 'waiting',
execution: 'execution',
finish: 'finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
Name: 'Name',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
slot: 'slot',
taskManager: 'taskManager',
jobManagerMemory: 'jobManagerMemory',
taskManagerMemory: 'taskManagerMemory',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
kill: 'kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
Delete: 'Delete',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and Cc recipients cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
Recipient: 'Recipient',
Cc: 'Cc',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
'Please enter method': 'Please enter method',
none: 'none',
name: 'name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
'Please enter the IP address separated by commas': 'Please enter the IP address separated by commas',
'Note: Multiple IP addresses have been comma separated': 'Note: Multiple IP addresses have been comma separated',
'Failure time': 'Failure time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Work flow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'IP address cannot be empty': 'IP address cannot be empty',
'Please enter the correct IP': 'Please enter the correct IP',
'Please generate token': 'Please generate token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys',
'Please enter Hive Partition Values': 'Please enter Hive Partition Values',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
'Enable': 'Enable',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout':'Connect Timeout',
'Socket Timeout':'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout must be a positive integer',
'Socket Timeout be a positive integer': 'Socket Timeout must be a positive integer',
'ms':'ms',
'Disable': 'Disable',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,338 | [Bug][ui] No warning when user does not fill the node blank | **Describe the question**
When I try to confirm and save an unconfigured node, no warning is shown.
**What are the current deficiencies and the benefits of improvement**
- It can help users fill in the required fields.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Add the warning.
| https://github.com/apache/dolphinscheduler/issues/3338 | https://github.com/apache/dolphinscheduler/pull/3349 | 78856fb08e512b62ab7b24c809407616b2b4bbc5 | a532d8ed69e41c69c4979eae0c1b964891abc696 | "2020-07-28T14:44:03Z" | java | "2020-07-30T05:14:05Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name(required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '项目首页',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
success: '成功',
failed: '失败',
delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main class': '主函数的class',
'Main jar package': '主jar包',
'Please enter main jar package': '请选择主jar包',
'Command-line parameters': '命令行参数',
'Please enter Command-line parameters': '请输入命令行参数',
'Other parameters': '其他参数',
'Please enter other parameters': '请输入其他参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
'Please enter main class': '请填写主函数的class',
Datasource: '数据源',
methods: '方法',
'Please enter method(optional)': '请输入方法(选填)',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Flink Version': 'Flink版本',
'Driver core number': 'Driver内核数',
'Please enter driver core number': '请输入Driver内核数',
'Driver memory use': 'Driver内存数',
'Please enter driver memory use': '请输入Driver内存数',
'Number of Executors': 'Executor数量',
'Please enter the number of Executor': '请输入Executor数量',
'Executor memory': 'Executor内存数',
'Please enter the Executor memory': '请输入Executor内存数',
'Executor core number': 'Executor内核数',
'Please enter Executor core number': '请输入Executor内核数',
'The number of Executors should be a positive integer': 'Executor数量为正整数',
'Memory should be a positive integer': '内存数为数字',
'Please enter ExecutorPlease enter Executor core number': '请填写Executor内核数',
'Core number should be positive integer': '内核数为正整数',
'Please enter jobManager memory': '请输入JobManager内存数',
'Please enter the taskManager memory': '请输入TaskManager内存数',
'Please enter solt number': '请输入slot数量',
'Please enter taskManager number': '请输入taskManager数量',
'SQL Type': 'sql类型',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Recipient required': '收件人邮箱必填',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入数据源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程Pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'Tenant Code': '租户编码',
'Tenant Name': '租户名称',
Queue: '队列',
'Please enter the tenant code in English': '请输入租户编码只允许英文',
'Please enter tenant code in English': '请输入英文租户编码',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'User Type':'用户类型',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Group Name': '组名称',
'Please enter group name': '请输入组名称',
'Group Type': '组类型',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': 'prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
Date: '日期',
waiting: '等待',
execution: '执行中',
finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
Name: '名称',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
kill: 'kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
Delete: '删除',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
Recipient: '收件人',
Cc: '抄送人',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
'Please enter method': '请输入方法',
none: '无',
name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': '队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
'Please enter the IP address separated by commas': '请输入IP地址多个用英文逗号隔开',
'Note: Multiple IP addresses have been comma separated': '注意:多个IP地址以英文逗号分割',
'Failure time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'Please select a queue': '默认为租户关联队列',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
slot: 'slot数量',
taskManager: 'taskManager数量',
jobManagerMemory: 'jobManager内存数',
taskManagerMemory: 'taskManager内存数',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'IP address cannot be empty': 'IP地址不能为空',
'Please enter the correct IP': '请输入正确的IP',
'Please generate token': '请生成Token',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Hadoop Custom Params': 'Hadoop参数',
'Sqoop Advanced Parameters': 'Sqoop参数',
'Sqoop Job Name': '任务名称',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
'Please enter Job Name(required)': '请输入任务名称(必填)',
'Please enter Custom Shell(required)': '请输入自定义脚本',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Custom Job': '自定义任务',
'Custom Script': '自定义脚本',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'Enable': '启用',
'Disable': '停用',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!',
'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存',
'User name length is between 3 and 39': '用户名长度在3~39之间',
'Timeout Settings': '超时设置',
'Connect Timeout':'连接超时',
'Socket Timeout':'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
'ms':'毫秒',
zkDirectory: 'zk注册目录',
'Directory detail': '查看目录详情',
'Connection name': '连线名',
'Current connection settings': '当前连线设置',
'Please save the DAG before formatting': '格式化前请先保存DAG'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,314 | [Question] Why "Failed to load ApplicationContext" when running tests locally | # Why "Failed to load ApplicationContext" when running tests locally
First, here is the test command I ran:
```
mvn test -pl dolphinscheduler-server -Dtest=TaskPriorityQueueConsumerTest#testSHELLTask
```
It should be noted that the above test is not the only one with this problem.
## The environment in which the problem occurs
The problem was originally discovered while running tests in IDEA, but it also occurs when running the tests directly with the `mvn` command.
- Operating system: MacOS
- JAVA: 1.8.0_111
- Maven: 3.5.2
- DS version: dev branch
To check whether this was a local environment problem, I created a fresh virtual machine for testing, which showed the same problem.
- Operating system: CentOS 7
- JAVA: 1.8.0_221
- Maven: 3.6.3
- DS version: dev branch
## Action steps
I tested it on my virtual machine using the following steps:
```
git clone ...
cd incubator-dolphinscheduler-github/
git checkout dev
mvn clean install -pl dolphinscheduler-common -am -Dmaven.test.skip=true
mvn clean install -pl dolphinscheduler-dao -am -Dmaven.test.skip=true
mvn clean install -pl dolphinscheduler-service -am -Dmaven.test.skip=true
mvn clean install -pl dolphinscheduler-alert -am -Dmaven.test.skip=true
mvn test -pl dolphinscheduler-server -Dtest=TaskPriorityQueueConsumerTest#testSHELLTask
```
## Error log
```
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 15.247 s <<< FAILURE! - in org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumerTest
[ERROR] testSHELLTask(org.apache.dolphinscheduler.server.master.consumer.TaskPriorityQueueConsumerTest) Time elapsed: 0 s <<< ERROR!
java.lang.IllegalStateException: Failed to load ApplicationContext
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'nettyExecutorManager': Unsatisfied dependency expressed through field 'zookeeperNodeManager'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperNodeManager': Unsatisfied dependency expressed through field 'registryCenter'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperRegistryCenter': Unsatisfied dependency expressed through field 'zookeeperCachedOperator'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperCachedOperator': Unsatisfied dependency expressed through field 'zookeeperClient'; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {@org.springframework.beans.factory.annotation.Autowired(required=true)}
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperNodeManager': Unsatisfied dependency expressed through field 'registryCenter'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperRegistryCenter': Unsatisfied dependency expressed through field 'zookeeperCachedOperator'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperCachedOperator': Unsatisfied dependency expressed through field 'zookeeperClient'; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {@org.springframework.beans.factory.annotation.Autowired(required=true)}
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperRegistryCenter': Unsatisfied dependency expressed through field 'zookeeperCachedOperator'; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperCachedOperator': Unsatisfied dependency expressed through field 'zookeeperClient'; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {@org.springframework.beans.factory.annotation.Autowired(required=true)}
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'zookeeperCachedOperator': Unsatisfied dependency expressed through field 'zookeeperClient'; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {@org.springframework.beans.factory.annotation.Autowired(required=true)}
Caused by: org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {@org.springframework.beans.factory.annotation.Autowired(required=true)}
01:07:48.958 [SessionTracker] INFO org.apache.zookeeper.server.SessionTrackerImpl - SessionTrackerImpl exited loop!
[INFO]
[INFO] Results:
[INFO]
[ERROR] Errors:
[ERROR] TaskPriorityQueueConsumerTest.testSHELLTask » IllegalState Failed to load Appl...
[INFO]
[ERROR] Tests run: 1, Failures: 0, Errors: 1, Skipped: 0
[INFO]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 48.206 s
[INFO] Finished at: 2020-07-27T01:07:49Z
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.22.1:test (default-test) on project dolphinscheduler-server: There are test failures.
[ERROR]
[ERROR] Please refer to /root/incubator-dolphinscheduler-github/dolphinscheduler-server/target/surefire-reports for the individual test results.
[ERROR] Please refer to dump files (if any exist) [date].dump, [date]-jvmRun[N].dump and [date].dumpstream.
[ERROR] -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
```
## Problem analysis
Based on what I know, I tried to solve the problem myself. Since the root cause is an `@Autowired` error, I followed the hint in the test code `dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java` and added `CuratorZookeeperClient.class` to the `@ContextConfiguration` annotation, which solved the problem.
Just like this:
```java
@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class,
NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, TaskPriorityQueueConsumer.class,
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, MasterConfig.class,
CuratorZookeeperClient.class})
```
This problem occurs in more than one test class, but it can be summarized as follows:
> If the test code autowires an object of class A, and class A in turn autowires an object of class B, then when `A.class` appears in the context configuration but `B.class` does not, a 'Failed to load ApplicationContext' exception is thrown and the test fails.
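To make the summarized rule concrete, here is a minimal, self-contained sketch (the classes `A`, `B` and the test class are invented for illustration; this is not DolphinScheduler code):
```java
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

class B { }

class A {
    @Autowired
    private B b; // A can only be instantiated if a B bean exists in the context
}

@RunWith(SpringJUnit4ClassRunner.class)
// B.class is missing from the classes array, so the context fails to load with
// "Failed to load ApplicationContext" before any test method runs.
@ContextConfiguration(classes = {A.class})
public class ContextFailureExampleTest {
    @Autowired
    private A a;

    @Test
    public void contextLoads() { }
}
```
Adding `B.class` to the `classes` array lets the context load, which is exactly what adding `CuratorZookeeperClient.class` does in `TaskPriorityQueueConsumerTest`.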
## Question
Every time new code is merged into the project, it is tested automatically. Why do these tests pass in CI without running into these problems?
| https://github.com/apache/dolphinscheduler/issues/3314 | https://github.com/apache/dolphinscheduler/pull/3304 | b555d0a086c88c29b40304802589b3ef77955ab9 | 3a0d4da27e6f981ecba7d061bad4644079ae122d | "2020-07-27T02:02:58Z" | java | "2020-07-30T05:28:27Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.consumer;
import java.util.Date;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher;
import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager;
import org.apache.dolphinscheduler.server.registry.DependencyConfig;
import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class,
NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, TaskPriorityQueueConsumer.class,
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, MasterConfig.class})
public class TaskPriorityQueueConsumerTest {
@Autowired
private TaskPriorityQueue taskPriorityQueue;
@Autowired
private TaskPriorityQueueConsumer taskPriorityQueueConsumer;
@Autowired
private ProcessService processService;
@Autowired
private ExecutorDispatcher dispatcher;
@Before
public void init(){
Tenant tenant = new Tenant();
tenant.setId(1);
tenant.setTenantCode("journey");
tenant.setTenantName("journey");
tenant.setDescription("journey");
tenant.setQueueId(1);
tenant.setCreateTime(new Date());
tenant.setUpdateTime(new Date());
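// stubs used by the queue consumer when it builds the task execution context (tenant and user-queue lookups)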
Mockito.doReturn(tenant).when(processService).getTenantForProcess(1,2);
Mockito.doReturn("default").when(processService).queryUserQueueByProcessInstanceId(1);
}
@Test
public void testSHELLTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SHELL");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-55201\",\"maxRetryTimes\":0,\"name\":\"测试任务\",\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
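// task priority string, presumably processInstancePriority_processInstanceId_taskInstancePriority_taskInstanceId_workerGroup;
// the background consumer thread polls it from the queue, and the sleep below gives it time to dispatch the task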
taskPriorityQueue.put("2_1_2_1_default");
Thread.sleep(10000);
}
@Test
public void testSQLTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SQL");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-3655\",\"maxRetryTimes\":0,\"name\":\"UDF测试\",\"params\":\"{\\\"postStatements\\\":[],\\\"connParams\\\":\\\"\\\",\\\"receiversCc\\\":\\\"\\\",\\\"udfs\\\":\\\"1\\\",\\\"type\\\":\\\"HIVE\\\",\\\"title\\\":\\\"test\\\",\\\"sql\\\":\\\"select id,name,ds,zodia(ds) from t_journey_user\\\",\\\"preStatements\\\":[],\\\"sqlType\\\":0,\\\"receivers\\\":\\\"825193156@qq.com\\\",\\\"datasource\\\":3,\\\"showType\\\":\\\"TABLE\\\",\\\"localParams\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQL\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
DataSource dataSource = new DataSource();
dataSource.setId(1);
dataSource.setName("sqlDatasource");
dataSource.setType(DbType.MYSQL);
dataSource.setUserId(2);
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}");
dataSource.setCreateTime(new Date());
dataSource.setUpdateTime(new Date());
Mockito.doReturn(dataSource).when(processService).findDataSourceById(1);
// stub the datasource lookup before the task is queued, so the consumer thread cannot poll it first
taskPriorityQueue.put("2_1_2_1_default");
Thread.sleep(10000);
}
@Test
public void testDataxTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("DATAX");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-97625\",\"maxRetryTimes\":0,\"name\":\"MySQL数据相互导入\",\"params\":\"{\\\"targetTable\\\":\\\"pv2\\\",\\\"postStatements\\\":[],\\\"jobSpeedRecord\\\":1000,\\\"customConfig\\\":0,\\\"dtType\\\":\\\"MYSQL\\\",\\\"dsType\\\":\\\"MYSQL\\\",\\\"jobSpeedByte\\\":0,\\\"dataSource\\\":80,\\\"dataTarget\\\":80,\\\"sql\\\":\\\"SELECT dt,count FROM pv\\\",\\\"preStatements\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"DATAX\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
DataSource dataSource = new DataSource();
dataSource.setId(80);
dataSource.setName("datax");
dataSource.setType(DbType.MYSQL);
dataSource.setUserId(2);
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}");
dataSource.setCreateTime(new Date());
dataSource.setUpdateTime(new Date());
Mockito.doReturn(dataSource).when(processService).findDataSourceById(80);
// stub the datasource lookup before the task is queued, so the consumer thread cannot poll it first
taskPriorityQueue.put("2_1_2_1_default");
Thread.sleep(10000);
}
@Test
public void testSqoopTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SQOOP");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-63634\",\"maxRetryTimes\":0,\"name\":\"MySQL数据导入HDSF\",\"params\":\"{\\\"sourceType\\\":\\\"MYSQL\\\",\\\"targetType\\\":\\\"HDFS\\\",\\\"targetParams\\\":\\\"{\\\\\\\"targetPath\\\\\\\":\\\\\\\"/test/datatest\\\\\\\",\\\\\\\"deleteTargetDir\\\\\\\":true,\\\\\\\"fileType\\\\\\\":\\\\\\\"--as-textfile\\\\\\\",\\\\\\\"compressionCodec\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"fieldsTerminated\\\\\\\":\\\\\\\",\\\\\\\",\\\\\\\"linesTerminated\\\\\\\":\\\\\\\"\\\\\\\\\\\\\\\\n\\\\\\\"}\\\",\\\"modelType\\\":\\\"import\\\",\\\"sourceParams\\\":\\\"{\\\\\\\"srcType\\\\\\\":\\\\\\\"MYSQL\\\\\\\",\\\\\\\"srcDatasource\\\\\\\":1,\\\\\\\"srcTable\\\\\\\":\\\\\\\"t_ds_user\\\\\\\",\\\\\\\"srcQueryType\\\\\\\":\\\\\\\"0\\\\\\\",\\\\\\\"srcQuerySql\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"srcColumnType\\\\\\\":\\\\\\\"0\\\\\\\",\\\\\\\"srcColumns\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"srcConditionList\\\\\\\":[],\\\\\\\"mapColumnHive\\\\\\\":[],\\\\\\\"mapColumnJava\\\\\\\":[]}\\\",\\\"localParams\\\":[],\\\"concurrency\\\":1}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQOOP\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
DataSource dataSource = new DataSource();
dataSource.setId(1);
dataSource.setName("datax");
dataSource.setType(DbType.MYSQL);
dataSource.setUserId(2);
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}");
dataSource.setCreateTime(new Date());
dataSource.setUpdateTime(new Date());
Mockito.doReturn(dataSource).when(processService).findDataSourceById(1);
// stub the datasource lookup before the task is queued, so the consumer thread cannot poll it first
taskPriorityQueue.put("2_1_2_1_default");
Thread.sleep(10000);
}
@Test
public void testTaskInstanceIsFinalState(){
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SHELL");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-55201\",\"maxRetryTimes\":0,\"name\":\"测试任务\",\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1);
taskPriorityQueueConsumer.taskInstanceIsFinalState(1);
}
@After
public void close() {
Stopper.stop();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,314 | [Question] Why "Failed to load ApplicationContext" when running tests locally | # Why "Failed to load ApplicationContext" when running tests locally
| https://github.com/apache/dolphinscheduler/issues/3314 | https://github.com/apache/dolphinscheduler/pull/3304 | b555d0a086c88c29b40304802589b3ef77955ab9 | 3a0d4da27e6f981ecba7d061bad4644079ae122d | "2020-07-27T02:02:58Z" | java | "2020-07-30T05:28:27Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.registry;
import java.util.Map;
import java.util.Set;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.registry.MasterRegistry;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
* zookeeper node manager test
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, MasterRegistry.class,WorkerRegistry.class,
ZookeeperRegistryCenter.class, MasterConfig.class, WorkerConfig.class,
ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class})
public class ZookeeperNodeManagerTest {
@Autowired
private ZookeeperNodeManager zookeeperNodeManager;
@Autowired
private MasterRegistry masterRegistry;
@Autowired
private WorkerRegistry workerRegistry;
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
@Autowired
private WorkerConfig workerConfig;
@Autowired
private MasterConfig masterConfig;
@Test
public void testGetMasterNodes(){
masterRegistry.registry();
try {
//let the zookeeperNodeManager catch the registry event
Thread.sleep(2000);
} catch (InterruptedException ignore) {
}
Set<String> masterNodes = zookeeperNodeManager.getMasterNodes();
Assert.assertTrue(CollectionUtils.isNotEmpty(masterNodes));
Assert.assertEquals(1, masterNodes.size());
Assert.assertEquals(NetUtils.getHost() + ":" + masterConfig.getListenPort(), masterNodes.iterator().next());
masterRegistry.unRegistry();
}
@Test
public void testGetWorkerGroupNodes(){
workerRegistry.registry();
try {
//let the zookeeperNodeManager catch the registry event
Thread.sleep(2000);
} catch (InterruptedException ignore) {
}
Map<String, Set<String>> workerGroupNodes = zookeeperNodeManager.getWorkerGroupNodes();
Assert.assertEquals(1, workerGroupNodes.size());
Assert.assertEquals("default".trim(), workerGroupNodes.keySet().iterator().next());
workerRegistry.unRegistry();
}
@Test
public void testGetWorkerGroupNodesWithParam(){
workerRegistry.registry();
try {
//let the zookeeperNodeManager catch the registry event
Thread.sleep(3000);
} catch (InterruptedException ignore) {
}
Map<String, Set<String>> workerGroupNodes = zookeeperNodeManager.getWorkerGroupNodes();
Set<String> workerNodes = zookeeperNodeManager.getWorkerGroupNodes("default");
Assert.assertTrue(CollectionUtils.isNotEmpty(workerNodes));
Assert.assertEquals(1, workerNodes.size());
Assert.assertEquals(NetUtils.getHost() + ":" + workerConfig.getListenPort(), workerNodes.iterator().next());
workerRegistry.unRegistry();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,314 | [Question] Why "Failed to load ApplicationContext" when running tests locally | # Why "Failed to load ApplicationContext" when running tests locally
| https://github.com/apache/dolphinscheduler/issues/3314 | https://github.com/apache/dolphinscheduler/pull/3304 | b555d0a086c88c29b40304802589b3ef77955ab9 | 3a0d4da27e6f981ecba7d061bad4644079ae122d | "2020-07-27T02:02:58Z" | java | "2020-07-30T05:28:27Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import java.util.Date;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor;
import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor;
import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService;
import org.apache.dolphinscheduler.server.master.registry.MasterRegistry;
import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import io.netty.channel.Channel;
/**
* test task call back service
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={
TaskCallbackServiceTestConfig.class,
SpringZKServer.class,
SpringApplicationContext.class,
MasterRegistry.class,
WorkerRegistry.class,
ZookeeperRegistryCenter.class,
MasterConfig.class,
WorkerConfig.class,
ZookeeperCachedOperator.class,
ZookeeperConfig.class,
ZookeeperNodeManager.class,
TaskCallbackService.class,
TaskResponseService.class,
TaskAckProcessor.class,
TaskResponseProcessor.class,
TaskExecuteProcessor.class})
public class TaskCallbackServiceTest {
@Autowired
private TaskCallbackService taskCallbackService;
@Autowired
private MasterRegistry masterRegistry;
@Autowired
private TaskAckProcessor taskAckProcessor;
@Autowired
private TaskResponseProcessor taskResponseProcessor;
@Autowired
private TaskExecuteProcessor taskExecuteProcessor;
/**
* send ack test
* @throws Exception
*/
@Test
public void testSendAck() throws Exception{
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
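// map task instance id 1 to this channel so the callback service can look it up when sending the ack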
taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(1);
ackCommand.setStartTime(new Date());
taskCallbackService.sendAck(1, ackCommand.convert2Command());
Stopper.stop();
nettyRemotingServer.close();
nettyRemotingClient.close();
}
/**
* send result test
* @throws Exception
*/
@Test
public void testSendResult() throws Exception{
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, taskResponseProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand();
responseCommand.setTaskInstanceId(1);
responseCommand.setEndTime(new Date());
taskCallbackService.sendResult(1, responseCommand.convert2Command());
Thread.sleep(5000);
Stopper.stop();
Thread.sleep(5000);
nettyRemotingServer.close();
nettyRemotingClient.close();
}
@Test(expected = IllegalArgumentException.class)
public void testSendAckWithIllegalArgumentException(){
TaskExecuteAckCommand ackCommand = Mockito.mock(TaskExecuteAckCommand.class);
taskCallbackService.sendAck(1, ackCommand.convert2Command());
Stopper.stop();
}
@Test
public void testPause(){
Assert.assertEquals(5000, taskCallbackService.pause(3));
}
@Test
public void testSendAck1(){
masterRegistry.registry();
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
// channel.close();
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(1);
ackCommand.setStartTime(new Date());
taskCallbackService.sendAck(1, ackCommand.convert2Command());
Assert.assertTrue(channel.isOpen());
Stopper.stop();
nettyRemotingServer.close();
nettyRemotingClient.close();
masterRegistry.unRegistry();
}
@Test
public void testTaskExecuteProcessor() throws Exception{
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_REQUEST, taskExecuteProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
TaskExecuteRequestCommand taskExecuteRequestCommand = new TaskExecuteRequestCommand();
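// the first send carries a null TaskExecutionContext (presumably to exercise the processor's error handling);
// the second send carries an empty but valid context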
nettyRemotingClient.send(new Host("localhost",30000),taskExecuteRequestCommand.convert2Command());
taskExecuteRequestCommand.setTaskExecutionContext(JSONUtils.toJsonString(new TaskExecutionContext()));
nettyRemotingClient.send(new Host("localhost",30000),taskExecuteRequestCommand.convert2Command());
Thread.sleep(5000);
Stopper.stop();
Thread.sleep(5000);
nettyRemotingServer.close();
nettyRemotingClient.close();
}
// @Test(expected = IllegalStateException.class)
// public void testSendAckWithIllegalStateException2(){
// masterRegistry.registry();
// final NettyServerConfig serverConfig = new NettyServerConfig();
// serverConfig.setListenPort(30000);
// NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
// nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor);
// nettyRemotingServer.start();
//
// final NettyClientConfig clientConfig = new NettyClientConfig();
// NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
// Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
// taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
// channel.close();
// TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
// ackCommand.setTaskInstanceId(1);
// ackCommand.setStartTime(new Date());
//
// nettyRemotingServer.close();
//
// taskCallbackService.sendAck(1, ackCommand.convert2Command());
// try {
// Thread.sleep(5000);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
//
// Stopper.stop();
//
// try {
// Thread.sleep(5000);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,314 | [Question] Why "Failed to load ApplicationContext" when running tests locally | # Why "Failed to load ApplicationContext" when running tests locally
| https://github.com/apache/dolphinscheduler/issues/3314 | https://github.com/apache/dolphinscheduler/pull/3304 | b555d0a086c88c29b40304802589b3ef77955ab9 | 3a0d4da27e6f981ecba7d061bad4644079ae122d | "2020-07-27T02:02:58Z" | java | "2020-07-30T05:28:27Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing, making the scheduling system out of the box for data
processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.5.RELEASE</spring.version>
<spring.boot.version>2.1.3.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.2.3</quartz.version>
<jackson.version>2.9.8</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.14</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.6</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>20.0</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<jsp-2.1.version>6.1.14</jsp-2.1.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<apache.rat.version>0.13</apache.rat.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
        <rpm-maven-plugin.version>2.2.0</rpm-maven-plugin.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<guava-retry.version>2.0.0</guava-retry.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-plugin-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>com.sun.jersey</artifactId>
<groupId>jersey-json</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<!-- for api module -->
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>${jsp-2.1.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
                    <version>${rpm-maven-plugin.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<includes>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/template/AlertTemplateFactoryTest.java</include>
<include>**/alert/template/impl/DefaultHTMLTemplateTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/ExcelUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/utils/JSONUtilsTest.java</include>
<!--<include>**/alert/utils/MailUtilsTest.java</include>-->
<include>**/alert/plugin/EmailAlertPluginTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
                        <include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/PasswordAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<!--<include>**/api/service/DataSourceServiceTest.java</include>-->
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/process/ProcessBuilderForWin32Test.java</include>
<include>**/common/utils/process/ProcessEnvironmentForWin32Test.java</include>
<include>**/common/utils/process/ProcessImplForWin32Test.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/HadoopUtils.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/dao/entity/UdfFuncTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<!--<include>**/remote/NettyRemotingClientTest.java</include>-->
<include>**/remote/ResponseFutureTest.java</include>
<!--<include>**/server/log/LoggerServerTest.java</include>-->
<include>**/server/entity/SQLTaskExecutionContextTest.java</include>
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<!--<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>-->
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<!--<include>**/server/master/register/MasterRegistryTest.java</include>-->
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<!--<include>**/server/master/ConditionsTaskTest.java</include>-->
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<!--<include>**/server/register/ZookeeperNodeManagerTest.java</include>-->
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<!--<include>**/server/utils/FlinkArgsUtilsTest.java</include>-->
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<!--<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>-->
<include>**/server/worker/registry/WorkerRegistryTest.java</include>
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/zk/CuratorZookeeperClientTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
                        <!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserAlertGroupMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/datasource/HiveDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/model/AlertDataTest.java</include>
<include>**/plugin/model/AlertInfoTest.java</include>
<include>**/plugin/utils/PropertyUtilsTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache.rat.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<addDefaultLicenseMatchers>false</addDefaultLicenseMatchers>
<licenses>
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL20</licenseFamilyCategory>
<licenseFamilyName>Apache License, 2.0</licenseFamilyName>
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF)</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License, 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>**/node_modules/**</exclude>
<exclude>**/node/**</exclude>
<exclude>**/dist/**</exclude>
<exclude>**/licenses/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/sql/soft_version</exclude>
<exclude>**/common/utils/ScriptRunner.java</exclude>
<exclude>**/*.json</exclude>
<!-- document files -->
<exclude>**/*.md</exclude>
                        <exclude>**/*.MD</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/docs/**</exclude>
<exclude>**/*.babelrc</exclude>
<exclude>**/*.eslintrc</exclude>
<exclude>**/.mvn/jvm.config</exclude>
<exclude>**/.mvn/wrapper/**</exclude>
</excludes>
<consoleOutput>true</consoleOutput>
</configuration>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-plugin-api</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,364 | After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar | *For better global communication, please give priority to using English description, thx! *
*Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.*
**Describe the question**
After upgrading from 1.2.0 to 1.3.1, the task JSON format change causes Spark tasks to fail with a ClassNotFound error because the jar package can no longer be found.
Even after applying the MySQL upgrade script, the JSON was not converted into a format that 1.3 can recognize. Does the upgrade script not migrate Spark tasks?
Or is 1.3.1 simply not compatible with tasks created in 1.2.0?
As a result, I have to re-edit and save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run.
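For illustration, a minimal sketch of the two `resourceList` shapes involved (the field names follow the `ResourceInfo` class touched by the fix; the jar path and the id value are made-up examples):

```java
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.utils.JSONUtils;

public class ResourceFormatSketch {
    public static void main(String[] args) {
        // hypothetical resourceList entries as they appear inside the task JSON
        String v120Entry = "{\"res\":\"lib/my-spark-job.jar\"}";           // 1.2.0: resource name only
        String v13Entry = "{\"id\":10,\"res\":\"lib/my-spark-job.jar\"}";  // 1.3.x: resource id added

        // an un-migrated 1.2.0 entry keeps the default id of 0 after deserialization,
        // which is how the scheduler can tell the two formats apart
        ResourceInfo oldStyle = JSONUtils.parseObject(v120Entry, ResourceInfo.class);
        ResourceInfo newStyle = JSONUtils.parseObject(v13Entry, ResourceInfo.class);
        System.out.println(oldStyle.getId()); // 0  -> old format
        System.out.println(newStyle.getId()); // 10 -> new format
    }
}
```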
**Which version of DolphinScheduler:**
-[1.3.1-preview]
**Additional context**
**Requirement or improvement**
- Please describe your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/3364 | https://github.com/apache/dolphinscheduler/pull/3378 | ff99ef383f647627dc13cfb71f5dd3c9b26ea76c | 3744167d52a52892b5ff98d3b07ab250970fdb5d | "2020-07-31T03:56:02Z" | java | "2020-08-03T02:31:27Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/process/ResourceInfo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.process;
/**
* resource info
*/
public class ResourceInfo {
    /**
     * id of the resource that was uploaded; an entry deserialized from old-version task JSON keeps the default value 0
     */
private int id;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
    /**
     * res is the name of the resource that was uploaded
     */
    private String res;
public String getRes() {
return res;
}
public void setRes(String res) {
this.res = res;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,364 | After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar | *For better global communication, please give priority to using English description, thx! *
*Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.*
**Describe the question**
After upgrading from 1.2.0 to 1.3.1, the task JSON format change causes Spark tasks to fail with a ClassNotFound error because the jar package can no longer be found.
Even after applying the MySQL upgrade script, the JSON was not converted into a format that 1.3 can recognize. Does the upgrade script not migrate Spark tasks?
Or is 1.3.1 simply not compatible with tasks created in 1.2.0?
As a result, I have to re-edit and save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run.
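The `TaskPriorityQueueConsumer` change below deals with exactly this split inside `getResourceFullNames()`: entries whose `id` is 0 are treated as old-format and looked up by resource name, the rest by resource id. A self-contained, slightly simplified sketch of that split (the entry values are illustrative):

```java
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ResourceSplitSketch {
    public static void main(String[] args) {
        ResourceInfo oldEntry = new ResourceInfo();
        oldEntry.setRes("lib/my-spark-job.jar");   // 1.2.0 style: only the name is set, id stays 0
        ResourceInfo newEntry = new ResourceInfo();
        newEntry.setId(10);                        // 1.3.x style: the resource id is set
        List<ResourceInfo> resourceList = Arrays.asList(oldEntry, newEntry);

        // the split the fix applies: id == 0 -> resolve by resource name,
        // id != 0 -> resolve by resource id
        Set<ResourceInfo> oldFormat = resourceList.stream()
                .filter(r -> r.getId() == 0)
                .collect(Collectors.toSet());
        Set<Integer> idsToResolve = resourceList.stream()
                .map(ResourceInfo::getId)
                .filter(id -> id != 0)
                .collect(Collectors.toSet());
        System.out.println(oldFormat.size() + " old-format entry, ids to resolve: " + idsToResolve);
    }
}
```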
**Which version of DolphinScheduler:**
-[1.3.1-preview]
**Additional context**
**Requirement or improvement**
- Please describe your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/3364 | https://github.com/apache/dolphinscheduler/pull/3378 | ff99ef383f647627dc13cfb71f5dd3c9b26ea76c | 3744167d52a52892b5ff98d3b07ab250970fdb5d | "2020-07-31T03:56:02Z" | java | "2020-08-03T02:31:27Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.consumer;
import com.alibaba.fastjson.JSONObject;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.datax.DataxParameters;
import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.common.task.sqoop.sources.SourceMysqlParameter;
import org.apache.dolphinscheduler.common.task.sqoop.targets.TargetMysqlParameter;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder;
import org.apache.dolphinscheduler.server.entity.*;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher;
import org.apache.dolphinscheduler.server.master.dispatch.context.ExecutionContext;
import org.apache.dolphinscheduler.server.master.dispatch.enums.ExecutorType;
import org.apache.dolphinscheduler.server.master.dispatch.exceptions.ExecuteException;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* TaskUpdateQueue consumer
*/
@Component
public class TaskPriorityQueueConsumer extends Thread{
/**
     * logger of TaskPriorityQueueConsumer
*/
private static final Logger logger = LoggerFactory.getLogger(TaskPriorityQueueConsumer.class);
/**
* taskUpdateQueue
*/
@Autowired
private TaskPriorityQueue taskPriorityQueue;
/**
* processService
*/
@Autowired
private ProcessService processService;
/**
* executor dispatcher
*/
@Autowired
private ExecutorDispatcher dispatcher;
/**
* master config
*/
@Autowired
private MasterConfig masterConfig;
@PostConstruct
public void init(){
super.setName("TaskUpdateQueueConsumerThread");
super.start();
}
@Override
public void run() {
List<String> failedDispatchTasks = new ArrayList<>();
while (Stopper.isRunning()){
try {
int fetchTaskNum = masterConfig.getMasterDispatchTaskNumber();
failedDispatchTasks.clear();
for(int i = 0; i < fetchTaskNum; i++){
if(taskPriorityQueue.size() <= 0){
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
continue;
}
                // if there is no task, take() blocks here
String taskPriorityInfo = taskPriorityQueue.take();
TaskPriority taskPriority = TaskPriority.of(taskPriorityInfo);
boolean dispatchResult = dispatch(taskPriority.getTaskId());
if(!dispatchResult){
failedDispatchTasks.add(taskPriorityInfo);
}
}
for(String dispatchFailedTask : failedDispatchTasks){
taskPriorityQueue.put(dispatchFailedTask);
}
}catch (Exception e){
logger.error("dispatcher task error",e);
}
}
}
/**
* dispatch task
*
* @param taskInstanceId taskInstanceId
* @return result
*/
private boolean dispatch(int taskInstanceId){
boolean result = false;
try {
TaskExecutionContext context = getTaskExecutionContext(taskInstanceId);
ExecutionContext executionContext = new ExecutionContext(context.toCommand(), ExecutorType.WORKER, context.getWorkerGroup());
if (taskInstanceIsFinalState(taskInstanceId)){
                // when the task is already finished, ignore it; there is no need to dispatch anymore
return true;
}else{
result = dispatcher.dispatch(executionContext);
}
} catch (ExecuteException e) {
logger.error("dispatch error",e);
}
return result;
}
/**
     * whether the task instance is in a final state:
     * success, failure, kill, stop, pause and waiting thread are all final states
* @param taskInstanceId taskInstanceId
* @return taskInstance is final state
*/
public Boolean taskInstanceIsFinalState(int taskInstanceId){
TaskInstance taskInstance = processService.findTaskInstanceById(taskInstanceId);
return taskInstance.getState().typeIsFinished();
}
/**
* get TaskExecutionContext
* @param taskInstanceId taskInstanceId
* @return TaskExecutionContext
*/
protected TaskExecutionContext getTaskExecutionContext(int taskInstanceId){
TaskInstance taskInstance = processService.getTaskInstanceDetailByTaskId(taskInstanceId);
// task type
TaskType taskType = TaskType.valueOf(taskInstance.getTaskType());
// task node
TaskNode taskNode = JSONObject.parseObject(taskInstance.getTaskJson(), TaskNode.class);
Integer userId = taskInstance.getProcessDefine() == null ? 0 : taskInstance.getProcessDefine().getUserId();
Tenant tenant = processService.getTenantForProcess(taskInstance.getProcessInstance().getTenantId(), userId);
// verify tenant is null
if (verifyTenantIsNull(tenant, taskInstance)) {
processService.changeTaskState(ExecutionStatus.FAILURE,
taskInstance.getStartTime(),
taskInstance.getHost(),
null,
null,
taskInstance.getId());
return null;
}
// set queue for process instance, user-specified queue takes precedence over tenant queue
String userQueue = processService.queryUserQueueByProcessInstanceId(taskInstance.getProcessInstanceId());
taskInstance.getProcessInstance().setQueue(StringUtils.isEmpty(userQueue) ? tenant.getQueue() : userQueue);
taskInstance.getProcessInstance().setTenantCode(tenant.getTenantCode());
taskInstance.setExecutePath(getExecLocalPath(taskInstance));
taskInstance.setResources(getResourceFullNames(taskNode));
SQLTaskExecutionContext sqlTaskExecutionContext = new SQLTaskExecutionContext();
DataxTaskExecutionContext dataxTaskExecutionContext = new DataxTaskExecutionContext();
ProcedureTaskExecutionContext procedureTaskExecutionContext = new ProcedureTaskExecutionContext();
SqoopTaskExecutionContext sqoopTaskExecutionContext = new SqoopTaskExecutionContext();
// SQL task
if (taskType == TaskType.SQL){
setSQLTaskRelation(sqlTaskExecutionContext, taskNode);
}
// DATAX task
if (taskType == TaskType.DATAX){
setDataxTaskRelation(dataxTaskExecutionContext, taskNode);
}
// procedure task
if (taskType == TaskType.PROCEDURE){
setProcedureTaskRelation(procedureTaskExecutionContext, taskNode);
}
if (taskType == TaskType.SQOOP){
setSqoopTaskRelation(sqoopTaskExecutionContext,taskNode);
}
return TaskExecutionContextBuilder.get()
.buildTaskInstanceRelatedInfo(taskInstance)
.buildProcessInstanceRelatedInfo(taskInstance.getProcessInstance())
.buildProcessDefinitionRelatedInfo(taskInstance.getProcessDefine())
.buildSQLTaskRelatedInfo(sqlTaskExecutionContext)
.buildDataxTaskRelatedInfo(dataxTaskExecutionContext)
.buildProcedureTaskRelatedInfo(procedureTaskExecutionContext)
.buildSqoopTaskRelatedInfo(sqoopTaskExecutionContext)
.create();
}
/**
* set procedure task relation
* @param procedureTaskExecutionContext procedureTaskExecutionContext
* @param taskNode taskNode
*/
private void setProcedureTaskRelation(ProcedureTaskExecutionContext procedureTaskExecutionContext, TaskNode taskNode) {
ProcedureParameters procedureParameters = JSONObject.parseObject(taskNode.getParams(), ProcedureParameters.class);
int datasourceId = procedureParameters.getDatasource();
DataSource datasource = processService.findDataSourceById(datasourceId);
procedureTaskExecutionContext.setConnectionParams(datasource.getConnectionParams());
}
/**
* set datax task relation
* @param dataxTaskExecutionContext dataxTaskExecutionContext
* @param taskNode taskNode
*/
private void setDataxTaskRelation(DataxTaskExecutionContext dataxTaskExecutionContext, TaskNode taskNode) {
DataxParameters dataxParameters = JSONObject.parseObject(taskNode.getParams(), DataxParameters.class);
DataSource dataSource = processService.findDataSourceById(dataxParameters.getDataSource());
DataSource dataTarget = processService.findDataSourceById(dataxParameters.getDataTarget());
if (dataSource != null){
dataxTaskExecutionContext.setDataSourceId(dataxParameters.getDataSource());
dataxTaskExecutionContext.setSourcetype(dataSource.getType().getCode());
dataxTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
}
if (dataTarget != null){
dataxTaskExecutionContext.setDataTargetId(dataxParameters.getDataTarget());
dataxTaskExecutionContext.setTargetType(dataTarget.getType().getCode());
dataxTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
}
}
/**
     * set sqoop task relation
* @param sqoopTaskExecutionContext sqoopTaskExecutionContext
* @param taskNode taskNode
*/
private void setSqoopTaskRelation(SqoopTaskExecutionContext sqoopTaskExecutionContext, TaskNode taskNode) {
SqoopParameters sqoopParameters = JSONObject.parseObject(taskNode.getParams(), SqoopParameters.class);
SourceMysqlParameter sourceMysqlParameter = JSONUtils.parseObject(sqoopParameters.getSourceParams(), SourceMysqlParameter.class);
TargetMysqlParameter targetMysqlParameter = JSONUtils.parseObject(sqoopParameters.getTargetParams(), TargetMysqlParameter.class);
DataSource dataSource = processService.findDataSourceById(sourceMysqlParameter.getSrcDatasource());
DataSource dataTarget = processService.findDataSourceById(targetMysqlParameter.getTargetDatasource());
if (dataSource != null){
sqoopTaskExecutionContext.setDataSourceId(dataSource.getId());
sqoopTaskExecutionContext.setSourcetype(dataSource.getType().getCode());
sqoopTaskExecutionContext.setSourceConnectionParams(dataSource.getConnectionParams());
}
if (dataTarget != null){
sqoopTaskExecutionContext.setDataTargetId(dataTarget.getId());
sqoopTaskExecutionContext.setTargetType(dataTarget.getType().getCode());
sqoopTaskExecutionContext.setTargetConnectionParams(dataTarget.getConnectionParams());
}
}
/**
* set SQL task relation
* @param sqlTaskExecutionContext sqlTaskExecutionContext
* @param taskNode taskNode
*/
private void setSQLTaskRelation(SQLTaskExecutionContext sqlTaskExecutionContext, TaskNode taskNode) {
SqlParameters sqlParameters = JSONObject.parseObject(taskNode.getParams(), SqlParameters.class);
int datasourceId = sqlParameters.getDatasource();
DataSource datasource = processService.findDataSourceById(datasourceId);
sqlTaskExecutionContext.setConnectionParams(datasource.getConnectionParams());
// whether udf type
boolean udfTypeFlag = EnumUtils.isValidEnum(UdfType.class, sqlParameters.getType())
&& StringUtils.isNotEmpty(sqlParameters.getUdfs());
if (udfTypeFlag){
String[] udfFunIds = sqlParameters.getUdfs().split(",");
int[] udfFunIdsArray = new int[udfFunIds.length];
for(int i = 0 ; i < udfFunIds.length;i++){
udfFunIdsArray[i]=Integer.parseInt(udfFunIds[i]);
}
List<UdfFunc> udfFuncList = processService.queryUdfFunListByids(udfFunIdsArray);
Map<UdfFunc,String> udfFuncMap = new HashMap<>();
for(UdfFunc udfFunc : udfFuncList) {
String tenantCode = processService.queryTenantCodeByResName(udfFunc.getResourceName(), ResourceType.UDF);
udfFuncMap.put(udfFunc,tenantCode);
}
sqlTaskExecutionContext.setUdfFuncTenantCodeMap(udfFuncMap);
}
}
/**
* get execute local path
*
* @return execute local path
*/
private String getExecLocalPath(TaskInstance taskInstance){
return FileUtils.getProcessExecDir(taskInstance.getProcessDefine().getProjectId(),
taskInstance.getProcessDefine().getId(),
taskInstance.getProcessInstance().getId(),
taskInstance.getId());
}
/**
     * whether the tenant is null
* @param tenant tenant
* @param taskInstance taskInstance
* @return result
*/
private boolean verifyTenantIsNull(Tenant tenant, TaskInstance taskInstance) {
if(tenant == null){
logger.error("tenant not exists,process instance id : {},task instance id : {}",
taskInstance.getProcessInstance().getId(),
taskInstance.getId());
return true;
}
return false;
}
/**
     * get the resource map: key is the resource full name, value is the tenant code
*/
private Map<String,String> getResourceFullNames(TaskNode taskNode) {
Map<String,String> resourceMap = new HashMap<>();
AbstractParameters baseParam = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams());
if (baseParam != null) {
List<ResourceInfo> projectResourceFiles = baseParam.getResourceFilesList();
if (CollectionUtils.isNotEmpty(projectResourceFiles)) {
// filter the resources that the resource id equals 0
Set<ResourceInfo> oldVersionResources = projectResourceFiles.stream().filter(t -> t.getId() == 0).collect(Collectors.toSet());
if (CollectionUtils.isNotEmpty(oldVersionResources)) {
oldVersionResources.forEach(
(t)->resourceMap.put(t.getRes(), processService.queryTenantCodeByResName(t.getRes(), ResourceType.FILE))
);
}
// get the resource id in order to get the resource names in batch
Stream<Integer> resourceIdStream = projectResourceFiles.stream().map(resourceInfo -> resourceInfo.getId());
Set<Integer> resourceIdsSet = resourceIdStream.collect(Collectors.toSet());
if (CollectionUtils.isNotEmpty(resourceIdsSet)) {
Integer[] resourceIds = resourceIdsSet.toArray(new Integer[resourceIdsSet.size()]);
List<Resource> resources = processService.listResourceByIds(resourceIds);
resources.forEach(
(t)->resourceMap.put(t.getFullName(),processService.queryTenantCodeByResName(t.getFullName(), ResourceType.FILE))
);
}
}
}
return resourceMap;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,364 | After updating the version from 1.2.0 to 1.3.1, running the Spark task fails because it can't find the jar | *For better global communication, please give priority to using English description, thx! *
*Please review https://dolphinscheduler.apache.org/en-us/docs/development/issue.html when describing an issue.*
**Describe the question**
After upgrading from 1.2.0 to 1.3.1, the task JSON format change causes Spark tasks to fail with a ClassNotFound error because the jar package can no longer be found.
Even after applying the MySQL upgrade script, the JSON was not converted into a format that 1.3 can recognize. Does the upgrade script not migrate Spark tasks?
Or is 1.3.1 simply not compatible with tasks created in 1.2.0?
As a result, I have to re-edit and save every Spark task so that it is refreshed into the 1.3.1 JSON format before it can run.
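For reference, one way an operator could locate definitions that still carry the old format, using helpers from the `ProcessService` shown below (a sketch only: the method name `findOldFormatTasks`, the way the service instance is obtained, and `defineId` are illustrative; this is not the actual upgrade script):

```java
// sketch: list the tasks in one definition whose resourceList entries still use the 1.2.0 shape;
// TaskNode, AbstractParameters, TaskParametersUtils and CollectionUtils are the
// dolphinscheduler-common types already imported by the ProcessService class below
public List<String> findOldFormatTasks(ProcessService processService, int defineId) {
    List<String> oldFormatTasks = new ArrayList<>();
    List<TaskNode> nodes = processService.getTaskNodeListByDefinitionId(defineId);
    if (nodes == null) {
        return oldFormatTasks;
    }
    for (TaskNode node : nodes) {
        AbstractParameters params = TaskParametersUtils.getParameters(node.getType(), node.getParams());
        if (params == null || CollectionUtils.isEmpty(params.getResourceFilesList())) {
            continue;
        }
        // a 1.2.0-style entry carries a resource name but no resource id (id stays 0)
        boolean oldStyle = params.getResourceFilesList().stream()
                .anyMatch(r -> r.getId() == 0 && r.getRes() != null);
        if (oldStyle) {
            oldFormatTasks.add(node.getName());
        }
    }
    return oldFormatTasks;
}
```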
**Which version of DolphinScheduler:**
-[1.3.1-preview]
**Additional context**
**Requirement or improvement**
- Please describe your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/3364 | https://github.com/apache/dolphinscheduler/pull/3378 | ff99ef383f647627dc13cfb71f5dd3c9b26ea76c | 3744167d52a52892b5ff98d3b07ab250970fdb5d | "2020-07-31T03:56:02Z" | java | "2020-08-03T02:31:27Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.process;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.cronutils.model.Cron;
import org.apache.commons.lang.ArrayUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.io.File;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toSet;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
 * process related DAO: wraps the mappers used for process operations.
*/
@Component
public class ProcessService {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXEUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private ProcessInstanceMapMapper processInstanceMapMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectMapper projectMapper;
/**
* handle Command (construct ProcessInstance from Command) , wrapped in transaction
* @param logger logger
* @param host host
* @param validThreadNum validThreadNum
* @param command found command
* @return process instance
*/
@Transactional(rollbackFor = Exception.class)
public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) {
ProcessInstance processInstance = constructProcessInstance(command, host);
//cannot construct process instance, return null;
if(processInstance == null){
logger.error("scan command, command parameter is error: {}", command);
moveToErrorCommand(command, "process instance is null");
return null;
}
if(!checkThreadNum(command, validThreadNum)){
logger.info("there is not enough thread for this command: {}", command);
return setWaitingThreadProcess(command, processInstance);
}
processInstance.setCommandType(command.getCommandType());
processInstance.addHistoryCmd(command.getCommandType());
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
delCommandByid(command.getId());
return processInstance;
}
/**
* save error command, and delete original command
* @param command command
* @param message message
*/
@Transactional(rollbackFor = Exception.class)
public void moveToErrorCommand(Command command, String message) {
ErrorCommand errorCommand = new ErrorCommand(command, message);
this.errorCommandMapper.insert(errorCommand);
delCommandByid(command.getId());
}
/**
* set process waiting thread
* @param command command
* @param processInstance processInstance
* @return process instance
*/
private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) {
processInstance.setState(ExecutionStatus.WAITTING_THREAD);
if(command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD){
processInstance.addHistoryCmd(command.getCommandType());
}
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
createRecoveryWaitingThreadCommand(command, processInstance);
return null;
}
/**
* check thread num
* @param command command
* @param validThreadNum validThreadNum
* @return if thread is enough
*/
private boolean checkThreadNum(Command command, int validThreadNum) {
int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
return validThreadNum >= commandThreadCount;
}
/**
* insert one command
* @param command command
* @return create result
*/
public int createCommand(Command command) {
int result = 0;
if (command != null){
result = commandMapper.insert(command);
}
return result;
}
/**
* find one command from queue list
* @return command
*/
public Command findOneCommand(){
return commandMapper.getOneToRun();
}
/**
     * check whether a duplicate of the input command already exists in the queue list
     * @param command command
     * @return whether a new command needs to be created
*/
public Boolean verifyIsNeedCreateCommand(Command command){
Boolean isNeedCreate = true;
Map<CommandType,Integer> cmdTypeMap = new HashMap<CommandType,Integer>();
cmdTypeMap.put(CommandType.REPEAT_RUNNING,1);
cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,1);
cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS,1);
CommandType commandType = command.getCommandType();
if(cmdTypeMap.containsKey(commandType)){
JSONObject cmdParamObj = (JSONObject) JSON.parse(command.getCommandParam());
JSONObject tempObj;
int processInstanceId = cmdParamObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING);
List<Command> commands = commandMapper.selectList(null);
// for all commands
for (Command tmpCommand:commands){
if(cmdTypeMap.containsKey(tmpCommand.getCommandType())){
tempObj = (JSONObject) JSON.parse(tmpCommand.getCommandParam());
if(tempObj != null && processInstanceId == tempObj.getInteger(CMDPARAM_RECOVER_PROCESS_ID_STRING)){
isNeedCreate = false;
break;
}
}
}
}
return isNeedCreate;
}
/**
* find process instance detail by id
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceDetailById(int processId){
return processInstanceMapper.queryDetailById(processId);
}
/**
* get task node list by definitionId
     * @param defineId process definition id
     * @return task node list
*/
public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId){
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
return null;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
return new ArrayList<>();
}
return processData.getTasks();
}
/**
* find process instance by id
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceById(int processId){
return processInstanceMapper.selectById(processId);
}
/**
* find process define by id.
* @param processDefinitionId processDefinitionId
* @return process definition
*/
public ProcessDefinition findProcessDefineById(int processDefinitionId) {
return processDefineMapper.selectById(processDefinitionId);
}
/**
* delete work process instance by id
* @param processInstanceId processInstanceId
* @return delete process instance result
*/
public int deleteWorkProcessInstanceById(int processInstanceId){
return processInstanceMapper.deleteById(processInstanceId);
}
/**
* delete all sub process by parent instance id
* @param processInstanceId processInstanceId
* @return delete all sub process instance result
*/
public int deleteAllSubWorkProcessByParentId(int processInstanceId){
List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId);
for(Integer subId : subProcessIdList){
deleteAllSubWorkProcessByParentId(subId);
deleteWorkProcessMapByParentId(subId);
removeTaskLogFile(subId);
deleteWorkProcessInstanceById(subId);
}
return 1;
}
/**
* remove task log file
* @param processInstanceId processInstanceId
*/
public void removeTaskLogFile(Integer processInstanceId){
LogClientService logClient = new LogClientService();
List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
if (CollectionUtils.isEmpty(taskInstanceList)){
return;
}
for (TaskInstance taskInstance : taskInstanceList){
String taskLogPath = taskInstance.getLogPath();
if (StringUtils.isEmpty(taskInstance.getHost())){
continue;
}
int port = Constants.RPC_PORT;
String ip = "";
try {
ip = Host.of(taskInstance.getHost()).getIp();
}catch (Exception e){
// compatible old version
ip = taskInstance.getHost();
}
// remove task log from loggerserver
logClient.removeTaskLog(ip,port,taskLogPath);
}
}
/**
     * calculate the number of sub processes in the process definition (plus one for the process itself).
* @param processDefinitionId processDefinitionId
* @return process thread num count
*/
private Integer workProcessThreadNumCount(Integer processDefinitionId){
List<Integer> ids = new ArrayList<>();
recurseFindSubProcessId(processDefinitionId, ids);
return ids.size()+1;
}
/**
* recursive query sub process definition id by parent id.
* @param parentId parentId
* @param ids ids
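     * e.g. a definition whose tasks reference sub process definitions 11 and 12 yields ids [11, 12] (hypothetical ids).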
*/
public void recurseFindSubProcessId(int parentId, List<Integer> ids){
ProcessDefinition processDefinition = processDefineMapper.selectById(parentId);
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
if (taskNodeList != null && taskNodeList.size() > 0){
for (TaskNode taskNode : taskNodeList){
String parameter = taskNode.getParams();
JSONObject parameterJson = JSONObject.parseObject(parameter);
if (parameterJson.getInteger(CMDPARAM_SUB_PROCESS_DEFINE_ID) != null){
SubProcessParameters subProcessParam = JSON.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(),ids);
}
}
}
}
/**
     * create recovery waiting thread command when the thread pool is not enough for the process instance.
     * sub work process instances do not need a recovery command.
     * create the recovery waiting thread command and delete the origin command at the same time.
     * if the recovery command already exists, only update the update_time field
* @param originCommand originCommand
* @param processInstance processInstance
*/
public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) {
        // sub process does not need to create a wait command
if(processInstance.getIsSubProcess() == Flag.YES){
if(originCommand != null){
commandMapper.deleteById(originCommand.getId());
}
return;
}
Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD, String.valueOf(processInstance.getId()));
// process instance quit by "waiting thread" state
if(originCommand == null){
Command command = new Command(
CommandType.RECOVER_WAITTING_THREAD,
processInstance.getTaskDependType(),
processInstance.getFailureStrategy(),
processInstance.getExecutorId(),
processInstance.getProcessDefinitionId(),
JSONUtils.toJson(cmdParam),
processInstance.getWarningType(),
processInstance.getWarningGroupId(),
processInstance.getScheduleTime(),
processInstance.getProcessInstancePriority()
);
saveCommand(command);
return ;
}
        // update the command time if the current command is a recover-from-waiting-thread command
if(originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD){
originCommand.setUpdateTime(new Date());
saveCommand(originCommand);
}else{
// delete old command and create new waiting thread command
commandMapper.deleteById(originCommand.getId());
originCommand.setId(0);
originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
originCommand.setUpdateTime(new Date());
originCommand.setCommandParam(JSONUtils.toJson(cmdParam));
originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority());
saveCommand(originCommand);
}
}
/**
* get schedule time from command
* @param command command
* @param cmdParam cmdParam map
* @return date
*/
private Date getScheduleTime(Command command, Map<String, String> cmdParam){
Date scheduleTime = command.getScheduleTime();
if(scheduleTime == null){
if(cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)){
scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
}
}
return scheduleTime;
}
/**
* generate a new work process instance from command.
* @param processDefinition processDefinition
* @param command command
* @param cmdParam cmdParam map
* @return process instance
*/
private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition,
Command command,
Map<String, String> cmdParam){
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processInstance.setState(ExecutionStatus.RUNNING_EXEUTION);
processInstance.setRecovery(Flag.NO);
processInstance.setStartTime(new Date());
processInstance.setRunTimes(1);
processInstance.setMaxTryTimes(0);
processInstance.setProcessDefinitionId(command.getProcessDefinitionId());
processInstance.setCommandParam(command.getCommandParam());
processInstance.setCommandType(command.getCommandType());
processInstance.setIsSubProcess(Flag.NO);
processInstance.setTaskDependType(command.getTaskDependType());
processInstance.setFailureStrategy(command.getFailureStrategy());
processInstance.setExecutorId(command.getExecutorId());
WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType();
processInstance.setWarningType(warningType);
Integer warningGroupId = command.getWarningGroupId() == null ? 0 : command.getWarningGroupId();
processInstance.setWarningGroupId(warningGroupId);
// schedule time
Date scheduleTime = getScheduleTime(command, cmdParam);
if(scheduleTime != null){
processInstance.setScheduleTime(scheduleTime);
}
processInstance.setCommandStartTime(command.getStartTime());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setConnects(processDefinition.getConnects());
// curing global params
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
//copy process define json to process instance
processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson());
// set process instance priority
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup();
processInstance.setWorkerGroup(workerGroup);
processInstance.setTimeout(processDefinition.getTimeout());
processInstance.setTenantId(processDefinition.getTenantId());
return processInstance;
}
/**
* get process tenant
     * if a tenant id is set in the definition, use the definition's tenant.
     * if no tenant id is set in the definition, or the tenant does not exist,
     * use the definition creator's tenant.
* @param tenantId tenantId
* @param userId userId
* @return tenant
*/
public Tenant getTenantForProcess(int tenantId, int userId){
Tenant tenant = null;
if(tenantId >= 0){
tenant = tenantMapper.queryById(tenantId);
}
if (userId == 0){
return null;
}
if(tenant == null){
User user = userMapper.selectById(userId);
tenant = tenantMapper.queryById(user.getTenantId());
}
return tenant;
}
/**
* check command parameters is valid
* @param command command
* @param cmdParam cmdParam map
* @return whether command param is valid
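     * e.g. a TASK_ONLY command without start node names in its parameters is rejected.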
*/
private Boolean checkCmdParam(Command command, Map<String, String> cmdParam){
if(command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType()== TaskDependType.TASK_PRE){
if(cmdParam == null
|| !cmdParam.containsKey(Constants.CMDPARAM_START_NODE_NAMES)
|| cmdParam.get(Constants.CMDPARAM_START_NODE_NAMES).isEmpty()){
logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType());
return false;
}
}
return true;
}
/**
* construct process instance according to one command.
* @param command command
* @param host host
* @return process instance
*/
private ProcessInstance constructProcessInstance(Command command, String host){
ProcessInstance processInstance = null;
CommandType commandType = command.getCommandType();
Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam());
ProcessDefinition processDefinition = null;
if(command.getProcessDefinitionId() != 0){
processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId());
if(processDefinition == null){
logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId());
return null;
}
}
if(cmdParam != null ){
Integer processInstanceId = 0;
// recover from failure or pause tasks
if(cmdParam.containsKey(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING)) {
String processId = cmdParam.get(Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING);
processInstanceId = Integer.parseInt(processId);
if (processInstanceId == 0) {
logger.error("command parameter is error, [ ProcessInstanceId ] is 0");
return null;
}
}else if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){
// sub process map
String pId = cmdParam.get(Constants.CMDPARAM_SUB_PROCESS);
processInstanceId = Integer.parseInt(pId);
}else if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD)){
// waiting thread command
String pId = cmdParam.get(Constants.CMDPARAM_RECOVERY_WAITTING_THREAD);
processInstanceId = Integer.parseInt(pId);
}
            if(processInstanceId == 0){
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}else{
processInstance = this.findProcessInstanceDetailById(processInstanceId);
}
processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId());
processInstance.setProcessDefinition(processDefinition);
//reset command parameter
if(processInstance.getCommandParam() != null){
Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam());
for(Map.Entry<String, String> entry: processCmdParam.entrySet()) {
if(!cmdParam.containsKey(entry.getKey())){
cmdParam.put(entry.getKey(), entry.getValue());
}
}
}
// reset command parameter if sub process
if(cmdParam.containsKey(Constants.CMDPARAM_SUB_PROCESS)){
processInstance.setCommandParam(command.getCommandParam());
}
}else{
// generate one new process instance
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}
if(!checkCmdParam(command, cmdParam)){
logger.error("command parameter check failed!");
return null;
}
if(command.getScheduleTime() != null){
processInstance.setScheduleTime(command.getScheduleTime());
}
processInstance.setHost(host);
ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXEUTION;
int runTime = processInstance.getRunTimes();
switch (commandType){
case START_PROCESS:
break;
case START_FAILURE_TASK_PROCESS:
// find failed tasks and init these tasks
List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE);
List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE);
List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL);
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
failedList.addAll(killedList);
failedList.addAll(toleranceList);
for(Integer taskId : failedList){
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING,
String.join(Constants.COMMA, convertIntListToString(failedList)));
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
processInstance.setRunTimes(runTime +1 );
break;
case START_CURRENT_TASK_PROCESS:
break;
case RECOVER_WAITTING_THREAD:
break;
case RECOVER_SUSPENDED_PROCESS:
// find pause tasks and init task's state
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE);
List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(),
ExecutionStatus.KILL);
suspendedNodeList.addAll(stopNodeList);
for(Integer taskId : suspendedNodeList){
// initialize the pause state
initTaskInstance(this.findTaskInstanceById(taskId));
}
                cmdParam.put(Constants.CMDPARAM_RECOVERY_START_NODE_STRING, String.join(Constants.COMMA, convertIntListToString(suspendedNodeList)));
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
processInstance.setRunTimes(runTime +1);
break;
case RECOVER_TOLERANCE_FAULT_PROCESS:
// recover tolerance fault process
processInstance.setRecovery(Flag.YES);
runStatus = processInstance.getState();
break;
case COMPLEMENT_DATA:
// delete all the valid tasks when complement data
List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance taskInstance : taskInstanceList){
taskInstance.setFlag(Flag.NO);
this.updateTaskInstance(taskInstance);
}
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case REPEAT_RUNNING:
// delete the recover task names from command parameter
if(cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)){
cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
}
// delete all the valid tasks when repeat running
List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId());
for(TaskInstance taskInstance : validTaskList){
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
}
processInstance.setStartTime(new Date());
processInstance.setEndTime(null);
processInstance.setRunTimes(runTime +1);
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case SCHEDULER:
break;
default:
break;
}
processInstance.setState(runStatus);
return processInstance;
}
/**
* return complement data if the process start with complement data
* @param processInstance processInstance
* @param command command
* @return command type
*/
private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command){
if(CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()){
return CommandType.COMPLEMENT_DATA;
}else{
return command.getCommandType();
}
}
/**
* initialize complement data parameters
* @param processDefinition processDefinition
* @param processInstance processInstance
* @param cmdParam cmdParam
*/
private void initComplementDataParam(ProcessDefinition processDefinition,
ProcessInstance processInstance,
Map<String, String> cmdParam) {
if(!processInstance.isComplementData()){
return;
}
Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE),
YYYY_MM_DD_HH_MM_SS);
if(Flag.NO == processInstance.getIsSubProcess()) {
processInstance.setScheduleTime(startComplementTime);
}
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
}
/**
* set sub work process parameters.
* handle sub work process instance, update relation table and command parameters
* set sub work process flag, extends parent work process command parameters
* @param subProcessInstance subProcessInstance
* @return process instance
*/
public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance){
String cmdParam = subProcessInstance.getCommandParam();
if(StringUtils.isEmpty(cmdParam)){
return subProcessInstance;
}
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
// write sub process id into cmd param.
if(paramMap.containsKey(CMDPARAM_SUB_PROCESS)
&& CMDPARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMDPARAM_SUB_PROCESS))){
paramMap.remove(CMDPARAM_SUB_PROCESS);
paramMap.put(CMDPARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId()));
subProcessInstance.setCommandParam(JSONUtils.toJson(paramMap));
subProcessInstance.setIsSubProcess(Flag.YES);
this.saveProcessInstance(subProcessInstance);
}
        // copy parent instance user-defined params to sub process
String parentInstanceId = paramMap.get(CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID);
if(StringUtils.isNotEmpty(parentInstanceId)){
ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId));
if(parentInstance != null){
subProcessInstance.setGlobalParams(
joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams()));
this.saveProcessInstance(subProcessInstance);
}else{
logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam);
}
}
ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class);
if(processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0){
return subProcessInstance;
}
// update sub process id to process map table
processInstanceMap.setProcessInstanceId(subProcessInstance.getId());
this.updateWorkProcessInstanceMap(processInstanceMap);
return subProcessInstance;
}
/**
* join parent global params into sub process.
     * only keys that are not already in the sub process global params are joined.
* @param parentGlobalParams parentGlobalParams
* @param subGlobalParams subGlobalParams
* @return global params join
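     * e.g. parent {dt=2020-01-01} joined into an empty sub param list lets the sub process inherit dt (hypothetical values).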
*/
private String joinGlobalParams(String parentGlobalParams, String subGlobalParams){
List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class);
List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class);
Map<String,String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
for(Property parent : parentPropertyList){
if(!subMap.containsKey(parent.getProp())){
subPropertyList.add(parent);
}
}
return JSONUtils.toJson(subPropertyList);
}
/**
* initialize task instance
* @param taskInstance taskInstance
*/
private void initTaskInstance(TaskInstance taskInstance){
if(!taskInstance.isSubProcess()){
if(taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure()){
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
return;
}
}
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateTaskInstance(taskInstance);
}
/**
* submit task to db
* submit sub process to command
* @param taskInstance taskInstance
* @return task instance
*/
@Transactional(rollbackFor = Exception.class)
public TaskInstance submitTask(TaskInstance taskInstance){
ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
logger.info("start submit task : {}, instance id:{}, state: {}",
taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState());
//submit to db
TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance);
if(task == null){
logger.error("end submit task to db error, task name:{}, process id:{} state: {} ",
taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState());
return task;
}
if(!task.getState().typeIsFinished()){
createSubWorkProcessCommand(processInstance, task);
}
logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ",
taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState());
return task;
}
/**
* set work process instance map
* @param parentInstance parentInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask){
ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId());
if(processMap != null){
return processMap;
}else if(parentInstance.getCommandType() == CommandType.REPEAT_RUNNING
|| parentInstance.isComplementData()){
// update current task id to map
// repeat running does not generate new sub process instance
processMap = findPreviousTaskProcessMap(parentInstance, parentTask);
if(processMap!= null){
processMap.setParentTaskInstanceId(parentTask.getId());
updateWorkProcessInstanceMap(processMap);
return processMap;
}
}
// new task
processMap = new ProcessInstanceMap();
processMap.setParentProcessInstanceId(parentInstance.getId());
processMap.setParentTaskInstanceId(parentTask.getId());
createWorkProcessInstanceMap(processMap);
return processMap;
}
/**
* find previous task work process map.
* @param parentProcessInstance parentProcessInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance,
TaskInstance parentTask) {
Integer preTaskId = 0;
List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId());
for(TaskInstance task : preTaskList){
if(task.getName().equals(parentTask.getName())){
preTaskId = task.getId();
ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId);
if(map!=null){
return map;
}
}
}
logger.info("sub process instance is not found,parent task:{},parent instance:{}",
parentTask.getId(), parentProcessInstance.getId());
return null;
}
/**
* create sub work process command
* @param parentProcessInstance parentProcessInstance
* @param task task
*/
private void createSubWorkProcessCommand(ProcessInstance parentProcessInstance,
TaskInstance task){
if(!task.isSubProcess()){
return;
}
ProcessInstanceMap instanceMap = setProcessInstanceMap(parentProcessInstance, task);
TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class);
Map<String, String> subProcessParam = JSONUtils.toMap(taskNode.getParams());
Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID));
ProcessInstance childInstance = findSubProcessInstance(parentProcessInstance.getId(), task.getId());
CommandType fatherType = parentProcessInstance.getCommandType();
CommandType commandType = fatherType;
if(childInstance == null){
String fatherHistoryCommand = parentProcessInstance.getHistoryCmd();
// sub process must begin with schedule/complement data
// if father begin with scheduler/complement data
if(fatherHistoryCommand.startsWith(CommandType.SCHEDULER.toString()) ||
fatherHistoryCommand.startsWith(CommandType.COMPLEMENT_DATA.toString())){
commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]);
}
}
if(childInstance != null){
childInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateProcessInstance(childInstance);
}
// set sub work process command
String processMapStr = JSONUtils.toJson(instanceMap);
Map<String, String> cmdParam = JSONUtils.toMap(processMapStr);
if(commandType == CommandType.COMPLEMENT_DATA ||
(childInstance != null && childInstance.isComplementData())){
Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam());
String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE);
String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime);
processMapStr = JSONUtils.toJson(cmdParam);
}
updateSubProcessDefinitionByParent(parentProcessInstance, childDefineId);
Command command = new Command();
command.setWarningType(parentProcessInstance.getWarningType());
command.setWarningGroupId(parentProcessInstance.getWarningGroupId());
command.setFailureStrategy(parentProcessInstance.getFailureStrategy());
command.setProcessDefinitionId(childDefineId);
command.setScheduleTime(parentProcessInstance.getScheduleTime());
command.setExecutorId(parentProcessInstance.getExecutorId());
command.setCommandParam(processMapStr);
command.setCommandType(commandType);
command.setProcessInstancePriority(parentProcessInstance.getProcessInstancePriority());
command.setWorkerGroup(parentProcessInstance.getWorkerGroup());
createCommand(command);
logger.info("sub process command created: {} ", command.toString());
}
/**
* update sub process definition
* @param parentProcessInstance parentProcessInstance
* @param childDefinitionId childDefinitionId
*/
private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) {
ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId());
ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId);
if(childDefinition != null && fatherDefinition != null){
childDefinition.setReceivers(fatherDefinition.getReceivers());
childDefinition.setReceiversCc(fatherDefinition.getReceiversCc());
processDefineMapper.updateById(childDefinition);
}
}
/**
     * submit task instance to the database
* @param taskInstance taskInstance
* @param processInstance processInstance
* @return task instance
*/
public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance){
ExecutionStatus processInstanceState = processInstance.getState();
if(taskInstance.getState().typeIsFailure()){
if(taskInstance.isSubProcess()){
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 );
}else {
if( processInstanceState != ExecutionStatus.READY_STOP
&& processInstanceState != ExecutionStatus.READY_PAUSE){
                    // mark the failed task instance invalid
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
                    // create a new task instance
if(taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE){
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 );
}
taskInstance.setEndTime(null);
taskInstance.setStartTime(new Date());
taskInstance.setFlag(Flag.YES);
taskInstance.setHost(null);
taskInstance.setId(0);
}
}
}
taskInstance.setExecutorId(processInstance.getExecutorId());
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority());
taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState));
taskInstance.setSubmitTime(new Date());
boolean saveResult = saveTaskInstance(taskInstance);
if(!saveResult){
return null;
}
return taskInstance;
}
/**
     * ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskInstanceId}_${workerGroup}
     * The task with the highest priority is selected by comparing the above four levels from high to low.
* @param taskInstance taskInstance
* @return task zk queue str
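     * e.g. "1_2019_2_3011_default" (hypothetical values: process priority 1, process instance 2019,
     * task priority 2, task instance 3011, worker group "default").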
*/
public String taskZkInfo(TaskInstance taskInstance) {
String taskWorkerGroup = getTaskWorkerGroup(taskInstance);
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if(processInstance == null){
logger.error("process instance is null. please check the task info, task id: " + taskInstance.getId());
return "";
}
StringBuilder sb = new StringBuilder(100);
sb.append(processInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE)
.append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE)
.append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE)
.append(taskInstance.getId()).append(Constants.UNDERLINE)
.append(taskInstance.getWorkerGroup());
return sb.toString();
}
/**
     * get the submit task instance state by the work process state
     * the task state cannot be modified when it is running/killed/submitted successfully, or when the
     * task instance already exists in the task queue.
     * return pause if the work process state is ready pause
     * return kill if the work process state is ready stop
     * if none of the above are satisfied, return submit success
*
* @param taskInstance taskInstance
* @param processInstanceState processInstanceState
* @return process instance state
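     * e.g. a task submitted while the process is READY_PAUSE is stored as PAUSE; under READY_STOP it is stored as KILL.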
*/
public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState){
ExecutionStatus state = taskInstance.getState();
if(
// running or killed
// the task already exists in task queue
// return state
state == ExecutionStatus.RUNNING_EXEUTION
|| state == ExecutionStatus.KILL
|| checkTaskExistsInTaskQueue(taskInstance)
){
return state;
}
        // return pause/kill if the process instance state is ready pause / ready stop
// or return submit success
if( processInstanceState == ExecutionStatus.READY_PAUSE){
state = ExecutionStatus.PAUSE;
}else if(processInstanceState == ExecutionStatus.READY_STOP
|| !checkProcessStrategy(taskInstance)) {
state = ExecutionStatus.KILL;
}else{
state = ExecutionStatus.SUBMITTED_SUCCESS;
}
return state;
}
/**
* check process instance strategy
* @param taskInstance taskInstance
* @return check strategy result
*/
private boolean checkProcessStrategy(TaskInstance taskInstance){
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
FailureStrategy failureStrategy = processInstance.getFailureStrategy();
if(failureStrategy == FailureStrategy.CONTINUE){
return true;
}
List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId());
for(TaskInstance task : taskInstances){
if(task.getState() == ExecutionStatus.FAILURE){
return false;
}
}
return true;
}
/**
* check the task instance existing in queue
* @param taskInstance taskInstance
* @return whether taskinstance exists queue
*/
public boolean checkTaskExistsInTaskQueue(TaskInstance taskInstance){
if(taskInstance.isSubProcess()){
return false;
}
String taskZkInfo = taskZkInfo(taskInstance);
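        // note: the queue lookup is effectively disabled here; taskZkInfo(...) is kept only for its
        // validation/logging side effect, and false is always returned for non sub-process tasks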
return false;
}
/**
* create a new process instance
* @param processInstance processInstance
*/
public void createProcessInstance(ProcessInstance processInstance){
if (processInstance != null){
processInstanceMapper.insert(processInstance);
}
}
/**
* insert or update work process instance to data base
* @param processInstance processInstance
*/
public void saveProcessInstance(ProcessInstance processInstance){
if (processInstance == null){
logger.error("save error, process instance is null!");
return ;
}
if(processInstance.getId() != 0){
processInstanceMapper.updateById(processInstance);
}else{
createProcessInstance(processInstance);
}
}
/**
* insert or update command
* @param command command
* @return save command result
*/
public int saveCommand(Command command){
if(command.getId() != 0){
return commandMapper.updateById(command);
}else{
return commandMapper.insert(command);
}
}
/**
* insert or update task instance
* @param taskInstance taskInstance
* @return save task instance result
*/
public boolean saveTaskInstance(TaskInstance taskInstance){
if(taskInstance.getId() != 0){
return updateTaskInstance(taskInstance);
}else{
return createTaskInstance(taskInstance);
}
}
/**
* insert task instance
* @param taskInstance taskInstance
* @return create task instance result
*/
public boolean createTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.insert(taskInstance);
return count > 0;
}
/**
* update task instance
* @param taskInstance taskInstance
* @return update task instance result
*/
public boolean updateTaskInstance(TaskInstance taskInstance){
int count = taskInstanceMapper.updateById(taskInstance);
return count > 0;
}
/**
* delete a command by id
* @param id id
*/
public void delCommandByid(int id) {
commandMapper.deleteById(id);
}
/**
* find task instance by id
* @param taskId task id
     * @return task instance
*/
public TaskInstance findTaskInstanceById(Integer taskId){
return taskInstanceMapper.selectById(taskId);
}
/**
* package task instance,associate processInstance and processDefine
* @param taskInstId taskInstId
* @return task instance
*/
public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId){
// get task instance
TaskInstance taskInstance = findTaskInstanceById(taskInstId);
if(taskInstance == null){
return taskInstance;
}
// get process instance
ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
// get process define
ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId());
taskInstance.setProcessInstance(processInstance);
taskInstance.setProcessDefine(processDefine);
return taskInstance;
}
/**
* get id list by task state
* @param instanceId instanceId
* @param state state
* @return task instance states
*/
public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state){
return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal());
}
/**
* find valid task list by process definition id
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId){
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES);
}
/**
* find previous task list by work process id
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId){
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO);
}
/**
* update work process instance map
* @param processInstanceMap processInstanceMap
* @return update process instance result
*/
public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){
return processInstanceMapMapper.updateById(processInstanceMap);
}
/**
* create work process instance map
* @param processInstanceMap processInstanceMap
* @return create process instance result
*/
public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap){
        if(processInstanceMap != null){
            return processInstanceMapMapper.insert(processInstanceMap);
        }
        return 0;
}
/**
* find work process map by parent process id and parent task id.
* @param parentWorkProcessId parentWorkProcessId
* @param parentTaskId parentTaskId
* @return process instance map
*/
public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId){
return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId);
}
/**
* delete work process map by parent process id
* @param parentWorkProcessId parentWorkProcessId
* @return delete process map result
*/
public int deleteWorkProcessMapByParentId(int parentWorkProcessId){
return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId);
}
/**
* find sub process instance
* @param parentProcessId parentProcessId
* @param parentTaskId parentTaskId
* @return process instance
*/
public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId){
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId);
if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId());
return processInstance;
}
/**
* find parent process instance
* @param subProcessId subProcessId
* @return process instance
*/
public ProcessInstance findParentProcessInstance(Integer subProcessId) {
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId);
if(processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0){
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId());
return processInstance;
}
/**
* change task state
* @param state state
* @param startTime startTime
* @param host host
* @param executePath executePath
* @param logPath logPath
* @param taskInstId taskInstId
*/
public void changeTaskState(ExecutionStatus state, Date startTime, String host,
String executePath,
String logPath,
int taskInstId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId);
taskInstance.setState(state);
taskInstance.setStartTime(startTime);
taskInstance.setHost(host);
taskInstance.setExecutePath(executePath);
taskInstance.setLogPath(logPath);
saveTaskInstance(taskInstance);
}
/**
* update process instance
* @param processInstance processInstance
* @return update process instance result
*/
public int updateProcessInstance(ProcessInstance processInstance){
return processInstanceMapper.updateById(processInstance);
}
/**
* update the process instance
* @param processInstanceId processInstanceId
* @param processJson processJson
* @param globalParams globalParams
* @param scheduleTime scheduleTime
* @param flag flag
* @param locations locations
* @param connects connects
* @return update process instance result
*/
public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag,
String locations, String connects){
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if(processInstance!= null){
processInstance.setProcessInstanceJson(processJson);
processInstance.setGlobalParams(globalParams);
processInstance.setScheduleTime(scheduleTime);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
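            // note: the flag parameter is accepted but not applied in this implementation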
return processInstanceMapper.updateById(processInstance);
}
return 0;
}
/**
* change task state
* @param state state
* @param endTime endTime
* @param taskInstId taskInstId
*/
public void changeTaskState(ExecutionStatus state,
Date endTime,
int processId,
String appIds,
int taskInstId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId);
taskInstance.setPid(processId);
taskInstance.setAppLink(appIds);
taskInstance.setState(state);
taskInstance.setEndTime(endTime);
saveTaskInstance(taskInstance);
}
/**
* convert integer list to string list
* @param intList intList
* @return string list
*/
public List<String> convertIntListToString(List<Integer> intList){
if(intList == null){
return new ArrayList<>();
}
List<String> result = new ArrayList<String>(intList.size());
for(Integer intVar : intList){
result.add(String.valueOf(intVar));
}
return result;
}
/**
* update pid and app links field by task instance id
* @param taskInstId taskInstId
* @param pid pid
* @param appLinks appLinks
*/
public void updatePidByTaskInstId(int taskInstId, int pid,String appLinks) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskInstId);
taskInstance.setPid(pid);
taskInstance.setAppLink(appLinks);
saveTaskInstance(taskInstance);
}
/**
* query schedule by id
* @param id id
* @return schedule
*/
public Schedule querySchedule(int id) {
return scheduleMapper.selectById(id);
}
/**
* query Schedule by processDefinitionId
* @param processDefinitionId processDefinitionId
     * @return schedule list
     * @see Schedule
*/
public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) {
return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
}
/**
* query need failover process instance
* @param host host
* @return process instance list
*/
public List<ProcessInstance> queryNeedFailoverProcessInstances(String host){
return processInstanceMapper.queryByHostAndStatus(host, stateArray);
}
/**
* process need failover process instance
* @param processInstance processInstance
*/
@Transactional(rollbackFor = Exception.class)
public void processNeedFailoverProcessInstances(ProcessInstance processInstance){
//1 update processInstance host is null
processInstance.setHost(Constants.NULL);
processInstanceMapper.updateById(processInstance);
//2 insert into recover command
Command cmd = new Command();
cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId());
cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId()));
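        // produces a param of the form {"<recover-key>":123}, e.g. {"ProcessInstanceId":123}
        // if the constant's value is "ProcessInstanceId" (hypothetical id)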
cmd.setExecutorId(processInstance.getExecutorId());
cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS);
createCommand(cmd);
}
/**
* query all need failover task instances by host
* @param host host
* @return task instance list
*/
public List<TaskInstance> queryNeedFailoverTaskInstances(String host){
return taskInstanceMapper.queryByHostAndStatus(host,
stateArray);
}
/**
* find data source by id
* @param id id
* @return datasource
*/
public DataSource findDataSourceById(int id){
return dataSourceMapper.selectById(id);
}
/**
* update process instance state by id
* @param processInstanceId processInstanceId
* @param executionStatus executionStatus
* @return update process result
*/
public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
ProcessInstance instance = processInstanceMapper.selectById(processInstanceId);
instance.setState(executionStatus);
return processInstanceMapper.updateById(instance);
}
/**
* find process instance by the task id
* @param taskId taskId
* @return process instance
*/
public ProcessInstance findProcessInstanceByTaskId(int taskId){
TaskInstance taskInstance = taskInstanceMapper.selectById(taskId);
if(taskInstance!= null){
return processInstanceMapper.selectById(taskInstance.getProcessInstanceId());
}
return null;
}
/**
     * find udf function list by id array
* @param ids ids
* @return udf function list
*/
public List<UdfFunc> queryUdfFunListByids(int[] ids){
return udfFuncMapper.queryUdfByIdStr(ids, null);
}
/**
* find tenant code by resource name
* @param resName resource name
* @param resourceType resource type
* @return tenant code
*/
public String queryTenantCodeByResName(String resName,ResourceType resourceType){
return resourceMapper.queryTenantCodeByResourceName(resName, resourceType.ordinal());
}
/**
* find schedule list by process define id.
* @param ids ids
* @return schedule list
*/
public List<Schedule> selectAllByProcessDefineId(int[] ids){
return scheduleMapper.selectAllByProcessDefineArray(
ids);
}
/**
* get dependency cycle by work process define id and scheduler fire time
* @param masterId masterId
* @param processDefinitionId processDefinitionId
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency
* @throws Exception if error throws Exception
*/
public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception {
List<CycleDependency> list = getCycleDependencies(masterId,new int[]{processDefinitionId},scheduledFireTime);
return list.size()>0 ? list.get(0) : null;
}
/**
* get dependency cycle list by work process define id list and scheduler fire time
* @param masterId masterId
* @param ids ids
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency list
* @throws Exception if error throws Exception
*/
public List<CycleDependency> getCycleDependencies(int masterId,int[] ids,Date scheduledFireTime) throws Exception {
List<CycleDependency> cycleDependencyList = new ArrayList<CycleDependency>();
if(ArrayUtils.isEmpty(ids)){
logger.warn("ids[] is empty!is invalid!");
return cycleDependencyList;
}
if(scheduledFireTime == null){
logger.warn("scheduledFireTime is null!is invalid!");
return cycleDependencyList;
}
String strCrontab = "";
CronExpression depCronExpression;
Cron depCron;
List<Date> list;
List<Schedule> schedules = this.selectAllByProcessDefineId(ids);
// for all scheduling information
for(Schedule depSchedule:schedules){
strCrontab = depSchedule.getCrontab();
depCronExpression = CronUtils.parse2CronExpression(strCrontab);
depCron = CronUtils.parse2Cron(strCrontab);
CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron);
if(cycleEnum == null){
logger.error("{} is not valid",strCrontab);
continue;
}
Calendar calendar = Calendar.getInstance();
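            // look back far enough to cover at least one full cycle:
            // 25 hours for hourly, 32 days for daily/weekly, 13 months for monthly schedules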
switch (cycleEnum){
/*case MINUTE:
calendar.add(Calendar.MINUTE,-61);*/
case HOUR:
calendar.add(Calendar.HOUR,-25);
break;
case DAY:
calendar.add(Calendar.DATE,-32);
break;
case WEEK:
calendar.add(Calendar.DATE,-32);
break;
case MONTH:
calendar.add(Calendar.MONTH,-13);
break;
default:
logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name());
continue;
}
Date start = calendar.getTime();
if(depSchedule.getProcessDefinitionId() == masterId){
list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression);
}else {
list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression);
}
if(list.size()>=1){
start = list.get(list.size()-1);
CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(),start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum);
cycleDependencyList.add(dependency);
}
}
return cycleDependencyList;
}
/**
* find last scheduler process instance in the date interval
* @param definitionId definitionId
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastSchedulerProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last manual process instance interval
* @param definitionId process definition id
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastManualProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last running process instance
* @param definitionId process definition id
* @param startTime start time
* @param endTime end time
* @return process instance
*/
public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) {
return processInstanceMapper.queryLastRunningProcess(definitionId,
startTime,
endTime,
stateArray);
}
/**
* query user queue by process instance id
* @param processInstanceId processInstanceId
* @return queue
*/
public String queryUserQueueByProcessInstanceId(int processInstanceId){
String queue = "";
ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
if(processInstance == null){
return queue;
}
User executor = userMapper.selectById(processInstance.getExecutorId());
if(executor != null){
queue = executor.getQueue();
}
return queue;
}
/**
* get task worker group
* @param taskInstance taskInstance
* @return workerGroupId
*/
public String getTaskWorkerGroup(TaskInstance taskInstance) {
String workerGroup = taskInstance.getWorkerGroup();
if(StringUtils.isNotBlank(workerGroup)){
return workerGroup;
}
int processInstanceId = taskInstance.getProcessInstanceId();
ProcessInstance processInstance = findProcessInstanceById(processInstanceId);
if(processInstance != null){
return processInstance.getWorkerGroup();
}
logger.info("task : {} will use default worker group", taskInstance.getId());
return Constants.DEFAULT_WORKER_GROUP;
}
/**
* get have perm project list
* @param userId userId
* @return project list
*/
public List<Project> getProjectListHavePerm(int userId){
List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId);
List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId);
if(createProjects == null){
createProjects = new ArrayList<>();
}
if(authedProjects != null){
createProjects.addAll(authedProjects);
}
return createProjects;
}
/**
* get have perm project ids
* @param userId userId
* @return project ids
*/
public List<Integer> getProjectIdListHavePerm(int userId){
List<Integer> projectIdList = new ArrayList<>();
for(Project project : getProjectListHavePerm(userId)){
projectIdList.add(project.getId());
}
return projectIdList;
}
/**
* list unauthorized udf function
* @param userId user id
* @param needChecks data source id array
* @return unauthorized udf function list
*/
public <T> List<T> listUnauthorized(int userId,T[] needChecks,AuthorizationType authorizationType){
List<T> resultList = new ArrayList<T>();
if (!ArrayUtils.isEmpty(needChecks)) {
Set<T> originResSet = new HashSet<T>(Arrays.asList(needChecks));
switch (authorizationType){
case RESOURCE_FILE_ID:
Set<Integer> authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedResourceFiles);
break;
case RESOURCE_FILE_NAME:
Set<String> authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(t -> t.getFullName()).collect(toSet());
originResSet.removeAll(authorizedResources);
break;
case UDF_FILE:
Set<Integer> authorizedUdfFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedUdfFiles);
break;
case DATASOURCE:
Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId,needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedDatasources);
break;
case UDF:
Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedUdfs);
break;
}
resultList.addAll(originResSet);
}
return resultList;
}
/**
* get user by user id
* @param userId user id
* @return User
*/
public User getUserById(int userId){
return userMapper.selectById(userId);
}
/**
     * get resource by resource id
     * @param resourceId resource id
     * @return Resource
     */
    public Resource getResourceById(int resourceId){
        return resourceMapper.selectById(resourceId);
}
/**
* list resources by ids
* @param resIds resIds
* @return resource list
*/
public List<Resource> listResourceByIds(Integer[] resIds){
return resourceMapper.listResourceByIds(resIds);
}
/**
* format task app id in task instance
     * @param taskInstance task instance
     * @return app id formatted as definitionId_processInstanceId_taskInstanceId
*/
public String formatTaskAppId(TaskInstance taskInstance){
ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId());
ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if(definition == null || processInstanceById == null){
return "";
}
return String.format("%s_%s_%s",
definition.getId(),
processInstanceById.getId(),
taskInstance.getId());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Clicking "View Log" on the subtask prompts "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col" style="min-width: 50px">
<span>{{$t('#')}}</span>
</th>
<th scope="col" style="min-width: 200px;max-width: 300px;">
<span>{{$t('Name')}}</span>
</th>
<th scope="col" style="min-width: 200px;max-width: 300px;">
<span>{{$t('Process Instance')}}</span>
</th>
<th scope="col" style="min-width: 60px">
<span>{{$t('Executor')}}</span>
</th>
<th scope="col" style="min-width: 70px">
<span style="margin-left: 5px">{{$t('Node Type')}}</span>
</th>
<th scope="col" style="min-width: 30px">
<span>{{$t('State')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('Submit Time')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('Start Time')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('End Time')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('host')}}</span>
</th>
<th scope="col" style="min-width: 70px">
<span>{{$t('Duration')}}(s)</span>
</th>
<th scope="col" style="min-width: 60px">
<div style="width: 50px">
<span>{{$t('Retry Count')}}</span>
</div>
</th>
<th scope="col" style="min-width: 60px">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
<span>{{parseInt(pageNo === 1 ? ($index + 1) : (($index + 1) + (pageSize * (pageNo - 1))))}}</span>
</td>
<td style="min-width: 200px;max-width: 300px;padding-right: 10px;">
<span class="ellipsis" :title="item.name">{{item.name}}</span>
</td>
<td style="min-width: 200px;max-width: 300px;padding-right: 10px;"><a href="javascript:" class="links" @click="_go(item)"><span class="ellipsis" :title="item.processInstanceName">{{item.processInstanceName}}</span></a></td>
<td>
<span v-if="item.executorName">{{item.executorName}}</span>
<span v-else>-</span>
</td>
<td><span style="margin-left: 5px">{{item.taskType}}</span></td>
<td><span v-html="_rtState(item.state)" style="cursor: pointer;"></span></td>
<td>
<span v-if="item.submitTime">{{item.submitTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.startTime">{{item.startTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.endTime">{{item.endTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td><span>{{item.host || '-'}}</span></td>
<td><span>{{item.duration}}</span></td>
<td><span>{{item.retryTimes}}</span></td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('View log')"
icon="ans-icon-log"
@click="_refreshLog(item)">
</x-button>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import Permissions from '@/module/permissions'
import mLog from '@/conf/home/pages/dag/_source/formModel/log'
import { tasksState } from '@/conf/home/pages/dag/_source/config'
export default {
name: 'list',
data () {
return {
list: [],
isAuth: Permissions.getAuth(),
backfillItem: {}
}
},
props: {
taskInstanceList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
_rtState (code) {
let o = tasksState[code]
return `<em class="${o.icoUnicode} ${o.isSpin ? 'as as-spin' : ''}" style="color:${o.color}" data-toggle="tooltip" data-container="body" title="${o.desc}"></em>`
},
_refreshLog (item) {
let self = this
let instance = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mLog, {
on: {
ok () {
},
close () {
instance.remove()
}
},
props: {
self: self,
source: 'list',
logId: item.id
}
})
}
})
},
_go (item) {
this.$router.push({ path: `/projects/instance/list/${item.processInstanceId}` })
    }
},
watch: {
taskInstanceList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.taskInstanceList
},
components: { }
}
</script>
|
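A minimal sketch of the log fetch behind _refreshLog above: the dialog hands item.id to the log
component as logId, which then requests the log text from the backend. The endpoint path,
parameter names, and response envelope below are illustrative assumptions, not this repository's
actual API code; they only show where issue #3393's "task instance does not exist" message would
surface when a sub-process row's id does not resolve to a task instance.

// Hypothetical sketch only: fetch log text for the task instance the dialog received.
async function fetchTaskLog (taskInstanceId, skipLineNum = 0, limit = 1000) {
  const params = new URLSearchParams({ taskInstanceId, skipLineNum, limit })
  const res = await fetch(`/dolphinscheduler/log/detail?${params}`) // assumed endpoint
  if (!res.ok) throw new Error(`log request failed with HTTP ${res.status}`)
  const body = await res.json() // assumed { code, msg, data } envelope
  if (body.code !== 0) throw new Error(body.msg) // e.g. "task instance does not exist"
  return body.data
}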
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col">
<span>{{$t('#')}}</span>
</th>
<th scope="col">
<span>{{$t('Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Whether directory')}}</span>
</th>
<th scope="col">
<span>{{$t('File Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" width="100">
<span>{{$t('Size')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Update Time')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
            <span>{{$index + 1 + pageSize * (pageNo - 1)}}</span>
</td>
<td>
<span class="ellipsis" v-tooltip.large.top.start.light="{text: item.alias, maxWidth: '500px'}">
<a href="javascript:" class="links" @click="_go(item)">{{item.alias}}</a>
</span>
</td>
<td>
<span>{{item.directory? $t('Yes') : $t('No')}}</span>
</td>
<td><span class="ellipsis" v-tooltip.large.top.start.light="{text: item.fileName, maxWidth: '500px'}">{{item.fileName}}</span></td>
<td>
<span v-if="item.description" class="ellipsis" v-tooltip.large.top.start.light="{text: item.description, maxWidth: '500px'}">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span>{{_rtSize(item.size)}}</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Edit')"
:disabled="_rtDisb(item)"
@click="_edit(item,$index)"
icon="ans-icon-edit">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
icon="ans-icon-play"
data-toggle="tooltip"
:title="$t('Rename')"
@click="_rename(item,$index)">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Download')"
:disabled="item.directory? true: false"
@click="_downloadFile(item)"
icon="ans-icon-download">
</x-button>
<x-poptip
:ref="'poptip-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
icon="ans-icon-trash"
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: []
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
      if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile('/dolphinscheduler/resources/download', {
id: item.id
})
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_closeDelete (i) {
this.$refs[`poptip-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mRename, {
on: {
onUpDate (item) {
self.$set(self.list, i, item)
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: item
}
})
}
})
},
    _rtDisb ({ alias, size }) {
      // edit is enabled only when the extension is whitelisted and the file is under 1 MB
      const ext = _.trimStart(alias.substring(alias.lastIndexOf('.')), '.')
      return !(_.includes(filtTypeArr, ext) && size < 1000000)
    }
},
watch: {
fileResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { }
}
</script> |
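A standalone sketch of the editability rule in _rtDisb above: a file's edit button is enabled
only when its extension is whitelisted and it is smaller than 1,000,000 bytes. filtTypeArr is
stubbed here as an assumption, since the real whitelist lives in ../../_source/common.

const filtTypeArr = ['txt', 'sh', 'sql'] // stub; the real list comes from common.js
function isEditDisabled ({ alias, size }) {
  const ext = alias.slice(alias.lastIndexOf('.') + 1)
  return !(filtTypeArr.includes(ext) && size < 1000000)
}
console.log(isEditDisabled({ alias: 'run.sh', size: 1024 }))    // false: edit enabled
console.log(isEditDisabled({ alias: 'big.sh', size: 2000000 })) // true: too large, disabled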
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col">
<span>{{$t('#')}}</span>
</th>
<th scope="col">
<span>{{$t('Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Whether directory')}}</span>
</th>
<th scope="col">
<span>{{$t('File Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" width="100">
<span>{{$t('Size')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Update Time')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
            <span>{{$index + 1 + pageSize * (pageNo - 1)}}</span>
</td>
<td>
<span class="ellipsis" v-tooltip.large.top.start.light="{text: item.alias, maxWidth: '500px'}">
<a href="javascript:" class="links" @click="_go(item)">{{item.alias}}</a>
</span>
</td>
<td>
<span>{{item.directory? $t('Yes') : $t('No')}}</span>
</td>
<td><span class="ellipsis" v-tooltip.large.top.start.light="{text: item.fileName, maxWidth: '500px'}">{{item.fileName}}</span></td>
<td>
<span v-if="item.description" class="ellipsis" v-tooltip.large.top.start.light="{text: item.description, maxWidth: '500px'}">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span>{{_rtSize(item.size)}}</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Edit')"
:disabled="_rtDisb(item)"
@click="_edit(item,$index)"
icon="ans-icon-edit">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
icon="ans-icon-play"
data-toggle="tooltip"
:title="$t('Rename')"
@click="_rename(item,$index)">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Download')"
:disabled="item.directory? true: false"
@click="_downloadFile(item)"
icon="ans-icon-download">
</x-button>
<x-poptip
:ref="'poptip-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
icon="ans-icon-trash"
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: []
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
      if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile('/dolphinscheduler/resources/download', {
id: item.id
})
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_closeDelete (i) {
this.$refs[`poptip-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mRename, {
on: {
onUpDate (item) {
self.$set(self.list, i, item)
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: item
}
})
}
})
},
    _rtDisb ({ alias, size }) {
      // edit is enabled only when the extension is whitelisted and the file is under 1 MB
      const ext = _.trimStart(alias.substring(alias.lastIndexOf('.')), '.')
      return !(_.includes(filtTypeArr, ext) && size < 1000000)
    }
},
watch: {
fileResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
},
// Listening for routing changes
// '$route': {
// deep: false,
// handler () {
// this.$emit('on-update',this.$route.params.id)
// }
// }
},
beforeRouteUpdate (to, from, next) {
      next() // next() must always be called to resolve the navigation
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { }
}
</script>
|
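The commented-out $route watcher above records the intended refresh-on-navigation behavior for
subdirectories. A hedged sketch of one way to enable it follows; whether this matches the
maintainers' eventual fix is an assumption.

// Hypothetical mixin form of the commented-out watcher: re-fetch the listing whenever the
// subdirectory route changes. The 'on-update' event name matches the emit used by _delete
// above; everything else is illustrative.
export const routeRefreshMixin = {
  watch: {
    $route (to) {
      this.$emit('on-update', to.params.id)
    }
  }
}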
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col">
<span>{{$t('#')}}</span>
</th>
<th scope="col">
<span>{{$t('UDF Resource Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Whether directory')}}</span>
</th>
<th scope="col">
<span>{{$t('File Name')}}</span>
</th>
<th scope="col" width="80">
<span>{{$t('File Size')}}</span>
</th>
<th scope="col">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Create Time')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Update Time')}}</span>
</th>
<th scope="col" width="110">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="$index">
<td>
            <span>{{$index + 1 + pageSize * (pageNo - 1)}}</span>
</td>
<td>
<span class="ellipsis" v-tooltip.large.top.start.light="{text: item.alias, maxWidth: '500px'}">
<a href="javascript:" class="links" @click="_go(item)">{{item.alias}}</a>
</span>
</td>
<td>
<span>{{item.directory? $t('Yes') : $t('No')}}</span>
</td>
<td><span class="ellipsis" v-tooltip.large.top.start.light="{text: item.fileName, maxWidth: '500px'}">{{item.fileName}}</span></td>
<td>
<span>{{_rtSize(item.size)}}</span>
</td>
<td>
<span v-if="item.description" class="ellipsis" v-tooltip.large.top.start.light="{text: item.description, maxWidth: '500px'}">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.createTime">{{item.createTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
icon="ans-icon-play"
data-toggle="tooltip"
:title="$t('Rename')"
@click="_rename(item,$index)">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Download')"
:disabled="item.directory? true: false"
icon="ans-icon-download"
@click="_downloadFile(item)">
</x-button>
<x-poptip
:ref="'poptip-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')"
icon="ans-icon-trash">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import mRename from './rename'
import { downloadFile } from '@/module/download'
import { bytesToSize } from '@/module/util/util'
import localStore from '@/module/util/localStorage'
export default {
name: 'udf-manage-list',
data () {
return {
list: []
}
},
props: {
udfResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_downloadFile (item) {
downloadFile('/dolphinscheduler/resources/download', {
id: item.id
})
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
      if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/udf/subUdfDirectory/${item.id}` })
}
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_closeDelete (i) {
this.$refs[`poptip-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mRename, {
on: {
onUpDate (item) {
self.$set(self.list, i, item)
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: item
}
})
}
})
}
},
watch: {
udfResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.udfResourcesList
},
components: { }
}
</script> |
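Every list component above calls downloadFile(url, { id }) the same way. A minimal sketch of
what such a helper can look like; this is an assumed implementation, not the actual source of
@/module/download.

// Hypothetical helper compatible with the call sites above: serialize the params into a
// query string and navigate so the browser streams the file down.
export function downloadFile (url, params = {}) {
  const qs = Object.entries(params)
    .map(([k, v]) => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`)
    .join('&')
  window.location.href = qs ? `${url}?${qs}` : url
}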
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/components/fileUpdate/fileChildReUpdate.vue | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/components/fileUpdate/fileReUpload.vue | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/components/nav/nav.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="nav-model">
<router-link :to="{ path: '/home'}" tag="div" class="logo-box">
<a href="javascript:"></a>
</router-link>
<div class="nav-box">
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/home'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-home-empty"></em>{{$t('Home')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/projects'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-setting"></em>{{$t('Project Manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/resource'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-folder-empty"></em>{{$t('Resources manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/datasource'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-database"></em>{{$t('Datasource manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/monitor'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-monitor"></em>{{$t('Monitor')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list" >
<div class="nav-links">
<router-link :to="{ path: '/security'}" tag="a" active-class="active" v-ps="['ADMIN_USER']">
<span><em class="ansiconfont ans-icon-shield"></em>{{$t('Security')}}</span><strong></strong>
</router-link>
</div>
</div>
</div>
<div class="right">
<!--<span class="docs">
<a :href="docLink">doc</a>
</span>-->
<span class="lang">
<x-poptip
style="width: 80px"
trigger="click">
<div class="lrns-list">
<a href="javascript:" @click="_toggleLanguage(item.code)" v-for="(item,$index) in localeList" :key="$index"><span>{{item.name}}</span></a>
</div>
<div class="login-model" slot="reference">
<span>{{activeLocale.name}}</span>
<em class="ans-icon-arrow-down"></em>
</div>
</x-poptip>
</span>
<x-poptip
ref="login"
trigger="click"
v-model="isLogin"
placement="bottom-end">
<div class="lrns-list">
<a href="javascript:" @click="_goAccount">
<em class="ans-icon-user-empty"></em>
<span>{{$t('User Information')}}</span>
</a>
<a href="javascript:" @click="_signOut">
<em class="ans-icon-off"></em>
<span>{{$t('Logout')}}</span>
</a>
</div>
<div class="login-model" slot="reference">
<em class="ans-icon-user-solid"></em>
<span>{{userInfo.userName}}</span>
<em class="ans-icon-arrow-down"></em>
</div>
</x-poptip>
</div>
<div class="file-update-model" @click="_toggleArchive" v-if="isUpdate">
<div class="icon-cloud">
<em class="ans ans-icon-upload"></em>
</div>
<div class="progress-box">
<m-progress-bar :value="progress" text-placement="bottom"></m-progress-bar>
</div>
</div>
<div class="adaptive-m-nav">
<div class="m-nav-box ">
<a href="javascript:" @click="mIsNav = !mIsNav"><em class="ans-icon-database"></em></a>
</div>
<div class="m-title-box">
<div class="logo-m"></div>
</div>
<div class="m-user-box">
<a href="javascript:" @click="_goAccount"><em class="ans-icon-user-empty"></em></a>
</div>
<transition name="slide-fade">
<div class="m-nav-list" v-if="mIsNav">
<ul @click="mIsNav = false">
<router-link :to="{ path: '/home'}" tag="li" active-class="active">
<em class="ans-icon-home-empty"></em>
<span>{{$t('Home')}}</span>
</router-link>
<router-link :to="{ path: '/projects'}" tag="li" active-class="active">
<em class="ans-icon-setting"></em>
            <span>{{$t('Project Manage')}}</span>
</router-link>
<router-link :to="{ path: '/resource'}" tag="li" active-class="active">
<em class="ans-icon-folder-empty"></em>
<span>{{$t('Resources manage')}}</span>
</router-link>
<router-link :to="{ path: '/datasource'}" tag="li" active-class="active">
<em class="ans-icon-database"></em>
<span>{{$t('Datasource manage')}}</span>
</router-link>
<router-link :to="{ path: '/security'}" tag="li" active-class="active" v-ps="['ADMIN_USER']">
<em class="ans-icon-shield"></em>
<span>{{$t('Security')}}</span>
</router-link>
</ul>
</div>
</transition>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import cookies from 'js-cookie'
import { mapState, mapActions } from 'vuex'
import { findComponentDownward } from '@/module/util/'
import mFileUpdate from '@/module/components/fileUpdate/fileUpdate'
import mFileChildUpdate from '@/module/components/fileUpdate/fileChildUpdate'
import mResourceChildUpdate from '@/module/components/fileUpdate/resourceChildUpdate'
import mDefinitionUpdate from '@/module/components/fileUpdate/definitionUpdate'
import mProgressBar from '@/module/components/progressBar/progressBar'
import { findLocale, localeList } from '@/module/i18n/config'
export default {
name: 'roof-nav',
data () {
return {
// Whether to drag
isDrag: false,
// Upload progress
progress: 0,
// Whether to upload
isUpdate: false,
// Whether to log in
isLogin: false,
// Mobile compatible navigation
mIsNav: false,
// Take the language list data to get rid of the language pack
localeList: _.map(_.cloneDeep(localeList()), v => _.omit(v, ['locale'])),
// Selected language
activeLocale: '',
// Environmental variable
docLink: ''
}
},
methods: {
...mapActions('user', ['signOut']),
/**
* User Info
*/
_goAccount () {
this.isLogin = false
this.$router.push({ name: 'account' })
},
/**
* Upload (for the time being)
*/
_fileUpdate (type) {
if (this.progress) {
this._toggleArchive()
return
}
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
if(type === 'DEFINITION'){
return h(mDefinitionUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `definition-list-index`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type
}
})
}else{
return h(mFileUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type
}
})
}
}
})
},
_fileChildUpdate (type,data) {
if (this.progress) {
this._toggleArchive()
return
}
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
return h(mFileChildUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList(data)
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type,
id: data
}
})
}
})
},
_resourceChildUpdate (type,data) {
if (this.progress) {
this._toggleArchive()
return
}
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
return h(mResourceChildUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList(data)
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type,
id: data
}
})
}
})
},
/**
* Upload popup layer display
*/
_toggleArchive () {
$('.update-file-modal').show()
},
/**
* sign out
*/
_signOut () {
this.signOut()
},
/**
* Language switching
*/
_toggleLanguage (language) {
cookies.set('language', language, { path: '/' })
setTimeout(() => {
window.location.reload()
}, 100)
}
},
created () {
let language = cookies.get('language')
    // fall back to the zh_CN locale object: the bare string '中文' lacks the .code and .name
    // fields read elsewhere in this component (default locale code assumed to be 'zh_CN')
    this.activeLocale = language ? findLocale(language) : findLocale('zh_CN')
this.docLink = process.env.NODE_ENV === 'true' ? 'docs' : `/view/docs/${this.activeLocale.code}/_book` // eslint-disable-line
},
computed: {
...mapState('user', ['userInfo'])
},
components: { mFileUpdate, mProgressBar, mDefinitionUpdate }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.nav-model {
height: 60px;
background: #2D303A;
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.3);
position: relative;
.m-title-box {
text-align: center;
.logo-m {
width: 36px;
height: 36px;
margin: 0 auto;
position: relative;
top: 12px;
}
}
.logo-box {
position: absolute;
left: 10px;
top: 8px;
cursor: pointer;
>a {
width: 180px;
height: 46px;
display: block;
background: url("./logo.svg") no-repeat;
}
}
.nav-box {
height: 60px;
line-height: 60px;
position: absolute;
left: 220px;
top: 0;
.list {
width: 106px;
float: left;
position: relative;
cursor: pointer;
margin-right: 14px;
.nav-links {
height: 60px;
a {
position: relative;
text-decoration: none;
font-size: 15px;
color: #333;
display: inline-block;
float: left;
text-align: center;
span {
display: block;
width: 106px;
color: #fff;
.ansiconfont {
vertical-align: -2px;
font-size: 22px;
margin-right: 4px;
}
}
&:hover {
span {
color: #2d8cf0;
}
}
&.active {
span {
color: #2D8BF0;
i {
color: #2d8cf0;
}
}
b {
height: 2px;
background: #2d8cf0;
display: block;
margin-top: -2px;
}
}
}
}
}
}
.right {
position: absolute;
right: 0;
top: 0;
.ans-poptip {
min-width: 120px;
}
>.lang {
.ans-poptip {
min-width: 80px;
}
}
>.docs {
padding-right: 20px;
a {
color: #fff;
font-size: 14px;
vertical-align: middle;
&:hover {
color: #2d8bf0;
}
}
}
.current-project {
height: 60px;
line-height: 56px;
display: inline-block;
margin-right: 16px;
cursor: pointer;
i {
font-size: 18px;
vertical-align: middle;
&:nth-child(1) {
font-size: 20px;
color:#2d8cf0;
}
}
span {
color: #333;
vertical-align: middle;
font-size: 14px;
}
&:hover {
span {
color: #2d8cf0;
}
}
}
.login-model {
display: inline-block;
margin-right: 20px;
cursor: pointer;
margin-top: 16px;
em {
font-size: 18px;
vertical-align: middle;
color: #fff;
}
span {
color: #fff;
vertical-align: middle;
font-size: 14px;
}
&:hover {
            >em, >span {
color: #2d8cf0;
}
}
}
.lrns-list {
margin: -6px 0;
a {
display: block;
height: 30px;
line-height: 30px;
font-size: 14px;
&:hover {
            em, span {
color: #2d8cf0;
}
}
}
}
}
.file-update-model {
position: absolute;
right: 160px;
top: 18px;
cursor: pointer;
.progress-box {
width: 240px;
}
.icon-cloud {
position: absolute;
left: -34px;
top: 2px;
      > em {
font-size: 22px;
color: #2d8cf0;
}
}
}
}
</style>
|
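_fileUpdate, _fileChildUpdate and _resourceChildUpdate above all repeat the same
imperative-modal pattern. A reduced sketch of that pattern, assuming only the $modal.dialog
API already used in nav.vue:

// Open a modal that renders a child component, wire its close event, and return the handle
// so callers can remove it later. Mirrors the options used throughout nav.vue.
function openDialog (vm, component, props) {
  const modal = vm.$modal.dialog({
    closable: false,
    showMask: true,
    escClose: true,
    className: 'update-file-modal',
    transitionName: 'opacityp',
    render (h) {
      return h(component, {
        props,
        on: { close: () => modal.remove() }
      })
    }
  })
  return modal
}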
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Click "View Log" of the subtask, prompt "The task instance does not exist"
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Please enter name (required)': 'Please enter name (required)',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Project Home',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
success: 'success',
failed: 'failed',
delete: 'delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main class': 'Main class',
'Main jar package': 'Main jar package',
'Please enter main jar package': 'Please enter main jar package',
'Command-line parameters': 'Command-line parameters',
'Please enter Command-line parameters': 'Please enter Command-line parameters',
'Other parameters': 'Other parameters',
'Please enter other parameters': 'Please enter other parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter method(optional)': 'Please enter method(optional)',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Driver core number': 'Driver core number',
'Please enter driver core number': 'Please enter driver core number',
'Driver memory use': 'Driver memory use',
'Please enter driver memory use': 'Please enter driver memory use',
'Number of Executors': 'Number of Executors',
'Please enter the number of Executor': 'Please enter the number of Executor',
'Executor memory': 'Executor memory',
'Please enter the Executor memory': 'Please enter the Executor memory',
'Executor core number': 'Executor core number',
'Please enter Executor core number': 'Please enter Executor core number',
'The number of Executors should be a positive integer': 'The number of Executors should be a positive integer',
'Memory should be a positive integer': 'Memory should be a positive integer',
  'Please enter ExecutorPlease enter Executor core number': 'Please enter Executor core number',
'Core number should be positive integer': 'Core number should be positive integer',
'SQL Type': 'SQL Type',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Recipient required': 'Recipient required',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'Tenant Code': 'Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Queue',
'Please select a queue': 'default is tenant association queue',
'Please enter the tenant code in English': 'Please enter the tenant code in English',
'Please enter tenant code in English': 'Please enter tenant code in English',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'Please enter phone number': 'Please enter phone number',
'Please enter main class': 'Please enter main class',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'Please select UDF resources directory': 'Please select UDF resources directory',
'UDF resources directory': 'UDF resources directory',
'Upload File Size': 'Upload File size cannot exceed 1g',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Group Name': 'Group Name',
'Please enter group name': 'Please enter group name',
'Group Type': 'Group Type',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
  'Please enter a password (6-22) character password': 'Please enter a 6-22 character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
  'Please enter a confirmation password (6-22) character password': 'Please enter a 6-22 character confirmation password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
Date: 'Date',
waiting: 'waiting',
execution: 'execution',
finish: 'finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
Name: 'Name',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter a instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
slot: 'slot',
taskManager: 'taskManager',
jobManagerMemory: 'jobManagerMemory',
taskManagerMemory: 'taskManagerMemory',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
kill: 'kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
Delete: 'Delete',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
TreeView: 'TreeView',
  'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and Cc recipients cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
Recipient: 'Recipient',
Cc: 'Cc',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
'Please enter method': 'Please enter method',
none: 'none',
name: 'name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
'Please enter the IP address separated by commas': 'Please enter the IP address separated by commas',
'Note: Multiple IP addresses have been comma separated': 'Note: Multiple IP addresses have been comma separated',
'Failure time': 'Failure time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Work flow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'IP address cannot be empty': 'IP address cannot be empty',
'Please enter the correct IP': 'Please enter the correct IP',
'Please generate token': 'Please generate token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
  'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
  'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys',
  'Please enter Hive Partition Values': 'Please enter Hive Partition Values',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
  'Successful branch flow and failed branch flow are required': 'Successful and failed branch flows of the conditions node are required',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,393 | [Bug][api] view the log of the subtask, prompt "task instance does not exist" | Clicking "View Log" on the subtask prompts "The task instance does not exist".
![image](https://user-images.githubusercontent.com/55787491/89242261-b36a8480-d633-11ea-82de-ca2988c41d2e.png)
**Which version of Dolphin Scheduler:**
-[1.3.2-release]
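A minimal sketch of the kind of guard that avoids the error (names and behavior are assumptions for illustration only; the actual fix landed in PR #3394):

```js
// Hypothetical guard: a sub-task row may not map to a persisted task
// instance, in which case there is no log that can be fetched.
function canViewLog (taskInstance) {
  return Boolean(taskInstance && taskInstance.id)
}

// usage sketch
const row = { id: null, taskType: 'SUB_PROCESS' }
if (!canViewLog(row)) {
  console.log('The task instance does not exist')
}
```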
| https://github.com/apache/dolphinscheduler/issues/3393 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-08-04T01:22:02Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name (required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '项目首页',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
success: '成功',
failed: '失败',
delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main class': '主函数的class',
'Main jar package': '主jar包',
'Please enter main jar package': '请选择主jar包',
'Command-line parameters': '命令行参数',
'Please enter Command-line parameters': '请输入命令行参数',
'Other parameters': '其他参数',
'Please enter other parameters': '请输入其他参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
'Please enter main class': '请填写主函数的class',
Datasource: '数据源',
methods: '方法',
'Please enter method(optional)': '请输入方法(选填)',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Driver core number': 'Driver内核数',
'Please enter driver core number': '请输入Driver内核数',
'Driver memory use': 'Driver内存数',
'Please enter driver memory use': '请输入Driver内存数',
'Number of Executors': 'Executor数量',
'Please enter the number of Executor': '请输入Executor数量',
'Executor memory': 'Executor内存数',
'Please enter the Executor memory': '请输入Executor内存数',
'Executor core number': 'Executor内核数',
'Please enter Executor core number': '请输入Executor内核数',
'The number of Executors should be a positive integer': 'Executor数量为正整数',
'Memory should be a positive integer': '内存数为数字',
'Please enter ExecutorPlease enter Executor core number': '请填写Executor内核数',
'Core number should be positive integer': '内核数为正整数',
'SQL Type': 'sql类型',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Recipient required': '收件人邮箱必填',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入数据源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程Pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'Tenant Code': '租户编码',
'Tenant Name': '租户名称',
Queue: '队列',
'Please enter the tenant code in English': '请输入租户编码只允许英文',
'Please enter tenant code in English': '请输入英文租户编码',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Group Name': '组名称',
'Please enter group name': '请输入组名称',
'Group Type': '组类型',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': 'prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
Date: '日期',
waiting: '等待',
execution: '执行中',
finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
Name: '名称',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
kill: 'kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
Delete: '删除',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
Recipient: '收件人',
Cc: '抄送人',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
'Please enter method': '请输入方法',
none: '无',
name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': '队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
'Please enter the IP address separated by commas': '请输入IP地址多个用英文逗号隔开',
'Note: Multiple IP addresses have been comma separated': '注意:多个IP地址以英文逗号分割',
'Failure time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'Please select a queue': '默认为租户关联队列',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
slot: 'slot数量',
  taskManager: 'taskManager数量',
jobManagerMemory: 'jobManager内存数',
taskManagerMemory: 'taskManager内存数',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'IP address cannot be empty': 'IP地址不能为空',
'Please enter the correct IP': '请输入正确的IP',
'Please generate token': '请生成Token',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!',
'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存',
'User name length is between 3 and 39': '用户名长度在3~39之间',
'Timeout Settings': '超时设置',
  'Connect Timeout': '连接超时',
  'Socket Timeout': 'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
  'ms': '毫秒',
zkDirectory: 'zk注册目录',
'Directory detail': '查看目录详情',
'Connection name': '连线名',
'Current connection settings': '当前连线设置',
'Please save the DAG before formatting': '格式化前请先保存DAG'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
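A sketch of the delete-then-reupload workaround described above (endpoint paths and field names are assumptions for illustration, not the documented API):

```js
// Replace a jar resource under the String-only content-update API by deleting
// the stale resource and uploading the new binary as multipart form data.
// `http` is any axios-like client; paths and fields here are assumed, not verified.
async function replaceJarResource (http, resourceId, file) {
  // 1) delete the stale resource (tasks still referencing it will break)
  await http.post('/dolphinscheduler/resources/delete', { id: resourceId })
  // 2) upload the replacement jar as multipart form data
  const form = new FormData()
  form.append('file', file)
  form.append('type', 'FILE')
  return http.post('/dolphinscheduler/resources/create', form)
}
```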
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/taskInstance/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col" style="min-width: 50px">
<span>{{$t('#')}}</span>
</th>
<th scope="col" style="min-width: 200px;max-width: 300px;">
<span>{{$t('Name')}}</span>
</th>
<th scope="col" style="min-width: 200px;max-width: 300px;">
<span>{{$t('Process Instance')}}</span>
</th>
<th scope="col" style="min-width: 60px">
<span>{{$t('Executor')}}</span>
</th>
<th scope="col" style="min-width: 70px">
<span style="margin-left: 5px">{{$t('Node Type')}}</span>
</th>
<th scope="col" style="min-width: 30px">
<span>{{$t('State')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('Submit Time')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('Start Time')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('End Time')}}</span>
</th>
<th scope="col" style="min-width: 130px">
<span>{{$t('host')}}</span>
</th>
<th scope="col" style="min-width: 70px">
<span>{{$t('Duration')}}(s)</span>
</th>
<th scope="col" style="min-width: 60px">
<div style="width: 50px">
<span>{{$t('Retry Count')}}</span>
</div>
</th>
<th scope="col" style="min-width: 60px">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
<span>{{parseInt(pageNo === 1 ? ($index + 1) : (($index + 1) + (pageSize * (pageNo - 1))))}}</span>
</td>
<td style="min-width: 200px;max-width: 300px;padding-right: 10px;">
<span class="ellipsis" :title="item.name">{{item.name}}</span>
</td>
<td style="min-width: 200px;max-width: 300px;padding-right: 10px;"><a href="javascript:" class="links" @click="_go(item)"><span class="ellipsis" :title="item.processInstanceName">{{item.processInstanceName}}</span></a></td>
<td>
<span v-if="item.executorName">{{item.executorName}}</span>
<span v-else>-</span>
</td>
<td><span style="margin-left: 5px">{{item.taskType}}</span></td>
<td><span v-html="_rtState(item.state)" style="cursor: pointer;"></span></td>
<td>
<span v-if="item.submitTime">{{item.submitTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.startTime">{{item.startTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.endTime">{{item.endTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td><span>{{item.host || '-'}}</span></td>
<td><span>{{item.duration}}</span></td>
<td><span>{{item.retryTimes}}</span></td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('View log')"
icon="ans-icon-log"
@click="_refreshLog(item)">
</x-button>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import Permissions from '@/module/permissions'
import mLog from '@/conf/home/pages/dag/_source/formModel/log'
import { tasksState } from '@/conf/home/pages/dag/_source/config'
export default {
name: 'list',
data () {
return {
list: [],
isAuth: Permissions.getAuth(),
backfillItem: {}
}
},
props: {
taskInstanceList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
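    // Map a task state code to the colored status icon markup shown in the State column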
_rtState (code) {
let o = tasksState[code]
return `<em class="${o.icoUnicode} ${o.isSpin ? 'as as-spin' : ''}" style="color:${o.color}" data-toggle="tooltip" data-container="body" title="${o.desc}"></em>`
},
_refreshLog (item) {
let self = this
let instance = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mLog, {
on: {
ok () {
},
close () {
instance.remove()
}
},
props: {
self: self,
source: 'list',
logId: item.id
}
})
}
})
},
_go (item) {
this.$router.push({ path: `/projects/instance/list/${item.processInstanceId}` })
},
},
watch: {
taskInstanceList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.taskInstanceList
},
components: { }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col">
<span>{{$t('#')}}</span>
</th>
<th scope="col">
<span>{{$t('Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Whether directory')}}</span>
</th>
<th scope="col">
<span>{{$t('File Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" width="100">
<span>{{$t('Size')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Update Time')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
<span>{{parseInt(pageNo === 1 ? ($index + 1) : (($index + 1) + (pageSize * (pageNo - 1))))}}</span>
</td>
<td>
<span class="ellipsis" v-tooltip.large.top.start.light="{text: item.alias, maxWidth: '500px'}">
<a href="javascript:" class="links" @click="_go(item)">{{item.alias}}</a>
</span>
</td>
<td>
<span>{{item.directory? $t('Yes') : $t('No')}}</span>
</td>
<td><span class="ellipsis" v-tooltip.large.top.start.light="{text: item.fileName, maxWidth: '500px'}">{{item.fileName}}</span></td>
<td>
<span v-if="item.description" class="ellipsis" v-tooltip.large.top.start.light="{text: item.description, maxWidth: '500px'}">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span>{{_rtSize(item.size)}}</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Edit')"
:disabled="_rtDisb(item)"
@click="_edit(item,$index)"
icon="ans-icon-edit">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
icon="ans-icon-play"
data-toggle="tooltip"
:title="$t('Rename')"
@click="_rename(item,$index)">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Download')"
:disabled="item.directory? true: false"
@click="_downloadFile(item)"
icon="ans-icon-download">
</x-button>
<x-poptip
:ref="'poptip-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
icon="ans-icon-trash"
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: []
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
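    // Stash "alias|size" in local storage so the edit page can restore the file name and size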
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if(item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile('/dolphinscheduler/resources/download', {
id: item.id
})
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_closeDelete (i) {
this.$refs[`poptip-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mRename, {
on: {
onUpDate (item) {
self.$set(self.list, i, item)
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: item
}
})
}
})
},
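    // Enable editing only for whitelisted text file types under ~1 MB; otherwise the edit button is disabled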
_rtDisb ({ alias, size }) {
let i = alias.lastIndexOf('.')
let a = alias.substring(i, alias.length)
let flag = _.includes(filtTypeArr, _.trimStart(a, '.'))
if (flag && (size < 1000000)) {
flag = true
} else {
flag = false
}
return !flag
}
},
watch: {
fileResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { }
}
</script> |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/subdirectory/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col">
<span>{{$t('#')}}</span>
</th>
<th scope="col">
<span>{{$t('Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Whether directory')}}</span>
</th>
<th scope="col">
<span>{{$t('File Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" width="100">
<span>{{$t('Size')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Update Time')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="item.id">
<td>
<span>{{parseInt(pageNo === 1 ? ($index + 1) : (($index + 1) + (pageSize * (pageNo - 1))))}}</span>
</td>
<td>
<span class="ellipsis" v-tooltip.large.top.start.light="{text: item.alias, maxWidth: '500px'}">
<a href="javascript:" class="links" @click="_go(item)">{{item.alias}}</a>
</span>
</td>
<td>
<span>{{item.directory? $t('Yes') : $t('No')}}</span>
</td>
<td><span class="ellipsis" v-tooltip.large.top.start.light="{text: item.fileName, maxWidth: '500px'}">{{item.fileName}}</span></td>
<td>
<span v-if="item.description" class="ellipsis" v-tooltip.large.top.start.light="{text: item.description, maxWidth: '500px'}">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span>{{_rtSize(item.size)}}</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Edit')"
:disabled="_rtDisb(item)"
@click="_edit(item,$index)"
icon="ans-icon-edit">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
icon="ans-icon-play"
data-toggle="tooltip"
:title="$t('Rename')"
@click="_rename(item,$index)">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Download')"
:disabled="item.directory? true: false"
@click="_downloadFile(item)"
icon="ans-icon-download">
</x-button>
<x-poptip
:ref="'poptip-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
icon="ans-icon-trash"
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: []
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if(item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile('/dolphinscheduler/resources/download', {
id: item.id
})
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_closeDelete (i) {
this.$refs[`poptip-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mRename, {
on: {
onUpDate (item) {
self.$set(self.list, i, item)
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: item
}
})
}
})
},
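    // Enable editing only for whitelisted text file types under ~1 MB; otherwise the edit button is disabled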
_rtDisb ({ alias, size }) {
let i = alias.lastIndexOf('.')
let a = alias.substring(i, alias.length)
let flag = _.includes(filtTypeArr, _.trimStart(a, '.'))
if (flag && (size < 1000000)) {
flag = true
} else {
flag = false
}
return !flag
}
},
watch: {
fileResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
},
// Listening for routing changes
// '$route': {
// deep: false,
// handler () {
// this.$emit('on-update',this.$route.params.id)
// }
// }
},
beforeRouteUpdate (to, from, next) {
      next() // next() must be called here
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<table class="fixed">
<tr>
<th scope="col">
<span>{{$t('#')}}</span>
</th>
<th scope="col">
<span>{{$t('UDF Resource Name')}}</span>
</th>
<th scope="col">
<span>{{$t('Whether directory')}}</span>
</th>
<th scope="col">
<span>{{$t('File Name')}}</span>
</th>
<th scope="col" width="80">
<span>{{$t('File Size')}}</span>
</th>
<th scope="col">
<span>{{$t('Description')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Create Time')}}</span>
</th>
<th scope="col" width="140">
<span>{{$t('Update Time')}}</span>
</th>
<th scope="col" width="110">
<span>{{$t('Operation')}}</span>
</th>
</tr>
<tr v-for="(item, $index) in list" :key="$index">
<td>
<span>{{parseInt(pageNo === 1 ? ($index + 1) : (($index + 1) + (pageSize * (pageNo - 1))))}}</span>
</td>
<td>
<span class="ellipsis" v-tooltip.large.top.start.light="{text: item.alias, maxWidth: '500px'}">
<a href="javascript:" class="links" @click="_go(item)">{{item.alias}}</a>
</span>
</td>
<td>
<span>{{item.directory? $t('Yes') : $t('No')}}</span>
</td>
<td><span class="ellipsis" v-tooltip.large.top.start.light="{text: item.fileName, maxWidth: '500px'}">{{item.fileName}}</span></td>
<td>
<span>{{_rtSize(item.size)}}</span>
</td>
<td>
<span v-if="item.description" class="ellipsis" v-tooltip.large.top.start.light="{text: item.description, maxWidth: '500px'}">{{item.description}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.createTime">{{item.createTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<span v-if="item.updateTime">{{item.updateTime | formatDate}}</span>
<span v-else>-</span>
</td>
<td>
<x-button
type="info"
shape="circle"
size="xsmall"
icon="ans-icon-play"
data-toggle="tooltip"
:title="$t('Rename')"
@click="_rename(item,$index)">
</x-button>
<x-button
type="info"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('Download')"
:disabled="item.directory? true: false"
icon="ans-icon-download"
@click="_downloadFile(item)">
</x-button>
<x-poptip
:ref="'poptip-' + $index"
placement="bottom-end"
width="90">
<p>{{$t('Delete?')}}</p>
<div style="text-align: right; margin: 0;padding-top: 4px;">
<x-button type="text" size="xsmall" shape="circle" @click="_closeDelete($index)">{{$t('Cancel')}}</x-button>
<x-button type="primary" size="xsmall" shape="circle" @click="_delete(item,$index)">{{$t('Confirm')}}</x-button>
</div>
<template slot="reference">
<x-button
type="error"
shape="circle"
size="xsmall"
data-toggle="tooltip"
:title="$t('delete')"
icon="ans-icon-trash">
</x-button>
</template>
</x-poptip>
</td>
</tr>
</table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import mRename from './rename'
import { downloadFile } from '@/module/download'
import { bytesToSize } from '@/module/util/util'
import localStore from '@/module/util/localStorage'
export default {
name: 'udf-manage-list',
data () {
return {
list: []
}
},
props: {
udfResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_downloadFile (item) {
downloadFile('/dolphinscheduler/resources/download', {
id: item.id
})
},
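    // Only directory rows navigate to a listing; plain UDF resource files have no detail page here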
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if(item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/udf/subUdfDirectory/${item.id}` })
}
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_closeDelete (i) {
this.$refs[`poptip-${i}`][0].doClose()
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$refs[`poptip-${i}`][0].doClose()
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$refs[`poptip-${i}`][0].doClose()
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mRename, {
on: {
onUpDate (item) {
self.$set(self.list, i, item)
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: item
}
})
}
})
}
},
watch: {
udfResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.udfResourcesList
},
components: { }
}
</script> |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/components/fileUpdate/fileChildReUpdate.vue | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/components/fileUpdate/fileReUpload.vue | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | used dolphin-scheduler V1.3.1,
the resource file content update interface supports only arguments of type "String", so to update the content of a jar file you must delete the file first and then upload it again.
If a task depends on that file, it gets worse: you must delete the task or remove the dependency first, otherwise you will get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/components/nav/nav.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="nav-model">
<router-link :to="{ path: '/home'}" tag="div" class="logo-box">
<a href="javascript:"></a>
</router-link>
<div class="nav-box">
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/home'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-home-empty"></em>{{$t('Home')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/projects'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-setting"></em>{{$t('Project Manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/resource'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-folder-empty"></em>{{$t('Resources manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/datasource'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-database"></em>{{$t('Datasource manage')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list">
<div class="nav-links">
<router-link :to="{ path: '/monitor'}" tag="a" active-class="active">
<span><em class="ansiconfont ans-icon-monitor"></em>{{$t('Monitor')}}</span><strong></strong>
</router-link>
</div>
</div>
<div class="clearfix list" >
<div class="nav-links">
<router-link :to="{ path: '/security'}" tag="a" active-class="active" v-ps="['ADMIN_USER']">
<span><em class="ansiconfont ans-icon-shield"></em>{{$t('Security')}}</span><strong></strong>
</router-link>
</div>
</div>
</div>
<div class="right">
<!--<span class="docs">
<a :href="docLink">doc</a>
</span>-->
<span class="lang">
<x-poptip
style="width: 80px"
trigger="click">
<div class="lrns-list">
<a href="javascript:" @click="_toggleLanguage(item.code)" v-for="(item,$index) in localeList" :key="$index"><span>{{item.name}}</span></a>
</div>
<div class="login-model" slot="reference">
<span>{{activeLocale.name}}</span>
<em class="ans-icon-arrow-down"></em>
</div>
</x-poptip>
</span>
<x-poptip
ref="login"
trigger="click"
v-model="isLogin"
placement="bottom-end">
<div class="lrns-list">
<a href="javascript:" @click="_goAccount">
<em class="ans-icon-user-empty"></em>
<span>{{$t('User Information')}}</span>
</a>
<a href="javascript:" @click="_signOut">
<em class="ans-icon-off"></em>
<span>{{$t('Logout')}}</span>
</a>
</div>
<div class="login-model" slot="reference">
<em class="ans-icon-user-solid"></em>
<span>{{userInfo.userName}}</span>
<em class="ans-icon-arrow-down"></em>
</div>
</x-poptip>
</div>
<div class="file-update-model" @click="_toggleArchive" v-if="isUpdate">
<div class="icon-cloud">
<em class="ans ans-icon-upload"></em>
</div>
<div class="progress-box">
<m-progress-bar :value="progress" text-placement="bottom"></m-progress-bar>
</div>
</div>
<div class="adaptive-m-nav">
<div class="m-nav-box ">
<a href="javascript:" @click="mIsNav = !mIsNav"><em class="ans-icon-database"></em></a>
</div>
<div class="m-title-box">
<div class="logo-m"></div>
</div>
<div class="m-user-box">
<a href="javascript:" @click="_goAccount"><em class="ans-icon-user-empty"></em></a>
</div>
<transition name="slide-fade">
<div class="m-nav-list" v-if="mIsNav">
<ul @click="mIsNav = false">
<router-link :to="{ path: '/home'}" tag="li" active-class="active">
<em class="ans-icon-home-empty"></em>
<span>{{$t('Home')}}</span>
</router-link>
<router-link :to="{ path: '/projects'}" tag="li" active-class="active">
<em class="ans-icon-setting"></em>
<span>{{$t('Project manage')}}</span>
</router-link>
<router-link :to="{ path: '/resource'}" tag="li" active-class="active">
<em class="ans-icon-folder-empty"></em>
<span>{{$t('Resources manage')}}</span>
</router-link>
<router-link :to="{ path: '/datasource'}" tag="li" active-class="active">
<em class="ans-icon-database"></em>
<span>{{$t('Datasource manage')}}</span>
</router-link>
<router-link :to="{ path: '/security'}" tag="li" active-class="active" v-ps="['ADMIN_USER']">
<em class="ans-icon-shield"></em>
<span>{{$t('Security')}}</span>
</router-link>
</ul>
</div>
</transition>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import cookies from 'js-cookie'
import { mapState, mapActions } from 'vuex'
import { findComponentDownward } from '@/module/util/'
import mFileUpdate from '@/module/components/fileUpdate/fileUpdate'
import mFileChildUpdate from '@/module/components/fileUpdate/fileChildUpdate'
import mResourceChildUpdate from '@/module/components/fileUpdate/resourceChildUpdate'
import mDefinitionUpdate from '@/module/components/fileUpdate/definitionUpdate'
import mProgressBar from '@/module/components/progressBar/progressBar'
import { findLocale, localeList } from '@/module/i18n/config'
export default {
name: 'roof-nav',
data () {
return {
// Whether to drag
isDrag: false,
// Upload progress
progress: 0,
// Whether to upload
isUpdate: false,
// Whether to log in
isLogin: false,
// Mobile compatible navigation
mIsNav: false,
// Take the language list data to get rid of the language pack
localeList: _.map(_.cloneDeep(localeList()), v => _.omit(v, ['locale'])),
// Selected language
activeLocale: '',
// Environmental variable
docLink: ''
}
},
methods: {
...mapActions('user', ['signOut']),
/**
* User Info
*/
_goAccount () {
this.isLogin = false
this.$router.push({ name: 'account' })
},
/**
* Upload (for the time being)
*/
_fileUpdate (type) {
if (this.progress) {
this._toggleArchive()
return
}
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
if(type === 'DEFINITION'){
return h(mDefinitionUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `definition-list-index`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type
}
})
}else{
return h(mFileUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList()
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type
}
})
}
}
})
},
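    // Upload a file into a specific resource sub-directory, then refresh that directory's listing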
_fileChildUpdate (type,data) {
if (this.progress) {
this._toggleArchive()
return
}
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
return h(mFileChildUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList(data)
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type,
id: data
}
})
}
})
},
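    // Same flow as _fileChildUpdate, but driven by the resource child-update dialog component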
_resourceChildUpdate (type,data) {
if (this.progress) {
this._toggleArchive()
return
}
let self = this
let modal = this.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'update-file-modal',
transitionName: 'opacityp',
render (h) {
return h(mResourceChildUpdate, {
on: {
onProgress (val) {
self.progress = val
},
onUpdate () {
findComponentDownward(self.$root, `resource-list-index-${type}`)._updateList(data)
self.isUpdate = false
self.progress = 0
modal.remove()
},
onArchive () {
self.isUpdate = true
},
close () {
self.progress = 0
modal.remove()
}
},
props: {
type: type,
id: data
}
})
}
})
},
/**
* Upload popup layer display
*/
_toggleArchive () {
$('.update-file-modal').show()
},
/**
* sign out
*/
_signOut () {
this.signOut()
},
/**
* Language switching
*/
_toggleLanguage (language) {
cookies.set('language', language, { path: '/' })
setTimeout(() => {
window.location.reload()
}, 100)
}
},
created () {
let language = cookies.get('language')
this.activeLocale = language ? findLocale(language) : '中文'
this.docLink = process.env.NODE_ENV === 'true' ? 'docs' : `/view/docs/${this.activeLocale.code}/_book` // eslint-disable-line
},
computed: {
...mapState('user', ['userInfo'])
},
components: { mFileUpdate, mProgressBar, mDefinitionUpdate }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.nav-model {
height: 60px;
background: #2D303A;
box-shadow: 0 3px 5px rgba(0, 0, 0, 0.3);
position: relative;
.m-title-box {
text-align: center;
.logo-m {
width: 36px;
height: 36px;
margin: 0 auto;
position: relative;
top: 12px;
}
}
.logo-box {
position: absolute;
left: 10px;
top: 8px;
cursor: pointer;
>a {
width: 180px;
height: 46px;
display: block;
background: url("./logo.svg") no-repeat;
}
}
.nav-box {
height: 60px;
line-height: 60px;
position: absolute;
left: 220px;
top: 0;
.list {
width: 106px;
float: left;
position: relative;
cursor: pointer;
margin-right: 14px;
.nav-links {
height: 60px;
a {
position: relative;
text-decoration: none;
font-size: 15px;
color: #333;
display: inline-block;
float: left;
text-align: center;
span {
display: block;
width: 106px;
color: #fff;
.ansiconfont {
vertical-align: -2px;
font-size: 22px;
margin-right: 4px;
}
}
&:hover {
span {
color: #2d8cf0;
}
}
&.active {
span {
color: #2D8BF0;
i {
color: #2d8cf0;
}
}
b {
height: 2px;
background: #2d8cf0;
display: block;
margin-top: -2px;
}
}
}
}
}
}
.right {
position: absolute;
right: 0;
top: 0;
.ans-poptip {
min-width: 120px;
}
>.lang {
.ans-poptip {
min-width: 80px;
}
}
>.docs {
padding-right: 20px;
a {
color: #fff;
font-size: 14px;
vertical-align: middle;
&:hover {
color: #2d8bf0;
}
}
}
.current-project {
height: 60px;
line-height: 56px;
display: inline-block;
margin-right: 16px;
cursor: pointer;
i {
font-size: 18px;
vertical-align: middle;
&:nth-child(1) {
font-size: 20px;
color:#2d8cf0;
}
}
span {
color: #333;
vertical-align: middle;
font-size: 14px;
}
&:hover {
span {
color: #2d8cf0;
}
}
}
.login-model {
display: inline-block;
margin-right: 20px;
cursor: pointer;
margin-top: 16px;
em {
font-size: 18px;
vertical-align: middle;
color: #fff;
}
span {
color: #fff;
vertical-align: middle;
font-size: 14px;
}
&:hover {
>i,>span {
color: #2d8cf0;
}
}
}
.lrns-list {
margin: -6px 0;
a {
display: block;
height: 30px;
line-height: 30px;
font-size: 14px;
&:hover {
i,span {
color: #2d8cf0;
}
}
}
}
}
.file-update-model {
position: absolute;
right: 160px;
top: 18px;
cursor: pointer;
.progress-box {
width: 240px;
}
.icon-cloud {
position: absolute;
left: -34px;
top: 2px;
> i {
font-size: 22px;
color: #2d8cf0;
}
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | Using dolphin-scheduler V1.3.1,
the resource file content update interface only supports arguments of type "String"; to update a jar file's content you must delete the file first and then upload it again.
If a task depends on that file it gets worse: you must delete the task or remove the dependency first, otherwise you get an exception. A hedged sketch of in-place binary overwrite follows.
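For illustration only — a minimal sketch of the requested improvement, assuming the stored resource can simply be overwritten in place; the paths and the `overwrite` helper below are hypothetical, not dolphinscheduler's actual resource API:

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Hypothetical sketch: names and paths are illustrative, not the real dolphinscheduler API.
public final class ResourceOverwrite {

    /**
     * Replace a stored resource's bytes in place instead of delete + re-upload,
     * so the resource keeps its id and dependent tasks stay valid.
     */
    public static void overwrite(Path storedResource, Path uploadedFile) throws IOException {
        Files.copy(uploadedFile, storedResource, StandardCopyOption.REPLACE_EXISTING);
    }

    public static void main(String[] args) throws IOException {
        // assumed example locations
        overwrite(Paths.get("/tmp/resources/udf.jar"), Paths.get("/tmp/upload/udf-new.jar"));
    }
}
```

Because the file is overwritten rather than re-created, tasks that reference it would not need to be deleted first.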
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Please enter name (required)': 'Please enter name (required)',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Project Home',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
success: 'success',
failed: 'failed',
delete: 'delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main class': 'Main class',
'Main jar package': 'Main jar package',
'Please enter main jar package': 'Please enter main jar package',
'Command-line parameters': 'Command-line parameters',
'Please enter Command-line parameters': 'Please enter Command-line parameters',
'Other parameters': 'Other parameters',
'Please enter other parameters': 'Please enter other parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter method(optional)': 'Please enter method(optional)',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Driver core number': 'Driver core number',
'Please enter driver core number': 'Please enter driver core number',
'Driver memory use': 'Driver memory use',
'Please enter driver memory use': 'Please enter driver memory use',
'Number of Executors': 'Number of Executors',
'Please enter the number of Executor': 'Please enter the number of Executor',
'Executor memory': 'Executor memory',
'Please enter the Executor memory': 'Please enter the Executor memory',
'Executor core number': 'Executor core number',
'Please enter Executor core number': 'Please enter Executor core number',
'The number of Executors should be a positive integer': 'The number of Executors should be a positive integer',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Please enter ExecutorPlease enter Executor core number': 'Please enter Executor core number',
'Core number should be positive integer': 'Core number should be positive integer',
'SQL Type': 'SQL Type',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Recipient required': 'Recipient required',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'Tenant Code': 'Tenant Code',
'Tenant Name': 'Tenant Name',
Queue: 'Queue',
'Please select a queue': 'default is tenant association queue',
'Please enter the tenant code in English': 'Please enter the tenant code in English',
'Please enter tenant code in English': 'Please enter tenant code in English',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'Please enter phone number': 'Please enter phone number',
'Please enter main class': 'Please enter main class',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'Please select UDF resources directory': 'Please select UDF resources directory',
'UDF resources directory': 'UDF resources directory',
'Upload File Size': 'Upload file size cannot exceed 1G',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Group Name': 'Group Name',
'Please enter group name': 'Please enter group name',
'Group Type': 'Group Type',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
Date: 'Date',
waiting: 'waiting',
execution: 'execution',
finish: 'finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
Name: 'Name',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter a instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
slot: 'slot',
taskManager: 'taskManager',
jobManagerMemory: 'jobManagerMemory',
taskManagerMemory: 'taskManagerMemory',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
kill: 'kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
Delete: 'Delete',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
Recipient: 'Recipient',
Cc: 'Cc',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
'Please enter method': 'Please enter method',
none: 'none',
name: 'name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
'Please enter the IP address separated by commas': 'Please enter the IP address separated by commas',
'Note: Multiple IP addresses have been comma separated': 'Note: Multiple IP addresses have been comma separated',
'Failure time': 'Failure time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Work flow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'IP address cannot be empty': 'IP address cannot be empty',
'Please enter the correct IP': 'Please enter the correct IP',
'Please generate token': 'Please generate token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys',
'Please enter Hive Partition Values': 'Please enter Hive Partition Values',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'conditions node Successful and failed branch flow are required',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,327 | [Improvement][api] resource file content update | Using dolphin-scheduler V1.3.1,
the resource file content update interface only supports arguments of type "String"; to update a jar file's content you must delete the file first and then upload it again.
If a task depends on that file it gets worse: you must delete the task or remove the dependency first, otherwise you get an exception.
| https://github.com/apache/dolphinscheduler/issues/3327 | https://github.com/apache/dolphinscheduler/pull/3394 | 811fd48f0a27b3112092b4f8b4fbeb5d55cf2da0 | 78672b035900d5d06f1f66af73f2ee41f87086ca | "2020-07-28T02:30:04Z" | java | "2020-08-04T03:40:49Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name (required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '项目首页',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
success: '成功',
failed: '失败',
delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main class': '主函数的class',
'Main jar package': '主jar包',
'Please enter main jar package': '请选择主jar包',
'Command-line parameters': '命令行参数',
'Please enter Command-line parameters': '请输入命令行参数',
'Other parameters': '其他参数',
'Please enter other parameters': '请输入其他参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
'Please enter main class': '请填写主函数的class',
Datasource: '数据源',
methods: '方法',
'Please enter method(optional)': '请输入方法(选填)',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Driver core number': 'Driver内核数',
'Please enter driver core number': '请输入Driver内核数',
'Driver memory use': 'Driver内存数',
'Please enter driver memory use': '请输入Driver内存数',
'Number of Executors': 'Executor数量',
'Please enter the number of Executor': '请输入Executor数量',
'Executor memory': 'Executor内存数',
'Please enter the Executor memory': '请输入Executor内存数',
'Executor core number': 'Executor内核数',
'Please enter Executor core number': '请输入Executor内核数',
'The number of Executors should be a positive integer': 'Executor数量为正整数',
'Memory should be a positive integer': '内存数为数字',
'Please enter ExecutorPlease enter Executor core number': '请填写Executor内核数',
'Core number should be positive integer': '内核数为正整数',
'SQL Type': 'sql类型',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Recipient required': '收件人邮箱必填',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入数据源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程Pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'Tenant Code': '租户编码',
'Tenant Name': '租户名称',
Queue: '队列',
'Please enter the tenant code in English': '请输入租户编码只允许英文',
'Please enter tenant code in English': '请输入英文租户编码',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Group Name': '组名称',
'Please enter group name': '请输入组名称',
'Group Type': '组类型',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': 'prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
Date: '日期',
waiting: '等待',
execution: '执行中',
finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
Name: '名称',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
kill: 'kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
Delete: '删除',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
Recipient: '收件人',
Cc: '抄送人',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
'Please enter method': '请输入方法',
none: '无',
name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': '队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
'Please enter the IP address separated by commas': '请输入IP地址多个用英文逗号隔开',
'Note: Multiple IP addresses have been comma separated': '注意:多个IP地址以英文逗号分割',
'Failure time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'Please select a queue': '默认为租户关联队列',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
slot: 'slot数量',
taskManager: 'taskManage数量',
jobManagerMemory: 'jobManager内存数',
taskManagerMemory: 'taskManager内存数',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'IP address cannot be empty': 'IP地址不能为空',
'Please enter the correct IP': '请输入正确的IP',
'Please generate token': '请生成Token',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!',
'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存',
'User name length is between 3 and 39': '用户名长度在3~39之间',
'Timeout Settings': '超时设置',
'Connect Timeout': '连接超时',
'Socket Timeout': 'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
'ms': '毫秒',
zkDirectory: 'zk注册目录',
'Directory detail': '查看目录详情',
'Connection name': '连线名',
'Current connection settings': '当前连线设置',
'Please save the DAG before formatting': '格式化前请先保存DAG'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,256 | admin account modify report error, caused by mobile phone | >
>
> I cannot change the administrator password on 1.2.1 either; it reports "request parameter xx is not valid".
The version I installed is 1.3.1, and I hit the same problem when changing the admin password. Inspecting the request with F12, I found the error is triggered by the 'Phone' field: you must set the 'Phone' parameter to a valid format, otherwise the password cannot be changed, as shown:
![image](https://user-images.githubusercontent.com/49484750/88007828-62da2e00-cb41-11ea-8804-9ebaacc690e2.png)
_Originally posted by @wjf989 in https://github.com/apache/incubator-dolphinscheduler/issues/219#issuecomment-661597078_
Suggestion: decouple the 'Phone' parameter validation from the other fields' validation; a minimal sketch of such decoupling follows.
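A minimal sketch of the suggested decoupling, assuming the phone field is made optional; the `PhoneCheck` class and regex below are hypothetical, not the project's actual validation code:

```java
import java.util.regex.Pattern;

// Hypothetical sketch of the suggestion above; names and regex are illustrative only.
public final class PhoneCheck {

    // assumed mainland-CN mobile format
    private static final Pattern PHONE = Pattern.compile("^1[3-9]\\d{9}$");

    /** Treat phone as optional: empty passes, only a non-empty value is format-checked. */
    public static boolean phoneOk(String phone) {
        return phone == null || phone.isEmpty() || PHONE.matcher(phone).matches();
    }

    public static void main(String[] args) {
        System.out.println(phoneOk(""));            // true: no longer blocks a password change
        System.out.println(phoneOk("13800138000")); // true
        System.out.println(phoneOk("123"));         // false
    }
}
```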
| https://github.com/apache/dolphinscheduler/issues/3256 | https://github.com/apache/dolphinscheduler/pull/3425 | 2b990e1f000ba583504585624591e8403958c374 | f784ce504f06d8a6b444a98ead01c6aa8b184679 | "2020-07-21T03:04:22Z" | java | "2020-08-07T02:20:56Z" | sql/upgrade/1.2.0_schema/mysql/dolphinscheduler_dml.sql | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
SET FOREIGN_KEY_CHECKS=0;
UPDATE QRTZ_CRON_TRIGGERS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler';
UPDATE QRTZ_TRIGGERS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler';
UPDATE QRTZ_FIRED_TRIGGERS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler';
UPDATE QRTZ_JOB_DETAILS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler';
UPDATE QRTZ_JOB_DETAILS SET JOB_CLASS_NAME='org.apache.dolphinscheduler.dao.quartz.ProcessScheduleJob' WHERE JOB_CLASS_NAME='cn.escheduler.server.quartz.ProcessScheduleJob';
UPDATE QRTZ_LOCKS SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler';
UPDATE QRTZ_SCHEDULER_STATE SET SCHED_NAME='DolphinScheduler' WHERE SCHED_NAME='EasyScheduler'; |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,256 | admin account modify report error, caused by mobile phone | >
>
> I cannot change the administrator password on 1.2.1 either; it reports "request parameter xx is not valid".
The version I installed is 1.3.1, and I hit the same problem when changing the admin password. Inspecting the request with F12, I found the error is triggered by the 'Phone' field: you must set the 'Phone' parameter to a valid format, otherwise the password cannot be changed, as shown:
![image](https://user-images.githubusercontent.com/49484750/88007828-62da2e00-cb41-11ea-8804-9ebaacc690e2.png)
_Originally posted by @wjf989 in https://github.com/apache/incubator-dolphinscheduler/issues/219#issuecomment-661597078_
Suggestion: decouple the 'Phone' parameter validation from the other fields' validation. | https://github.com/apache/dolphinscheduler/issues/3256 | https://github.com/apache/dolphinscheduler/pull/3425 | 2b990e1f000ba583504585624591e8403958c374 | f784ce504f06d8a6b444a98ead01c6aa8b184679 | "2020-07-21T03:04:22Z" | java | "2020-08-07T02:20:56Z" | sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_dml.sql | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
SET FOREIGN_KEY_CHECKS=0;
UPDATE t_ds_resources SET pid=-1,is_directory=false WHERE pid IS NULL;
UPDATE t_ds_resources SET full_name = concat('/',alias) WHERE pid=-1 and full_name IS NULL;
UPDATE QRTZ_JOB_DETAILS SET JOB_CLASS_NAME='org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob' WHERE JOB_CLASS_NAME='org.apache.dolphinscheduler.server.quartz.ProcessScheduleJob';
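-- map legacy numeric worker_group ids to group names; fall back to 'default' when the referenced group no longer exists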
UPDATE t_ds_process_instance instance SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE instance.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_task_instance instance SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE instance.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_schedules schedule SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE schedule.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_command command SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_error_command command SET `worker_group`=IFNULL((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,256 | admin account modify report error, caused by mobile phone | >
>
> I cannot change the administrator password on 1.2.1 either; it reports "request parameter xx is not valid".
The version I installed is 1.3.1, and I hit the same problem when changing the admin password. Inspecting the request with F12, I found the error is triggered by the 'Phone' field: you must set the 'Phone' parameter to a valid format, otherwise the password cannot be changed, as shown:
![image](https://user-images.githubusercontent.com/49484750/88007828-62da2e00-cb41-11ea-8804-9ebaacc690e2.png)
_Originally posted by @wjf989 in https://github.com/apache/incubator-dolphinscheduler/issues/219#issuecomment-661597078_
Suggestion: decouple the 'Phone' parameter validation from the other fields' validation. | https://github.com/apache/dolphinscheduler/issues/3256 | https://github.com/apache/dolphinscheduler/pull/3425 | 2b990e1f000ba583504585624591e8403958c374 | f784ce504f06d8a6b444a98ead01c6aa8b184679 | "2020-07-21T03:04:22Z" | java | "2020-08-07T02:20:56Z" | sql/upgrade/1.3.0_schema/postgresql/dolphinscheduler_dml.sql | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
UPDATE t_ds_resources SET pid=-1,is_directory=false WHERE pid IS NULL;
UPDATE t_ds_resources SET full_name = concat('/',alias) WHERE pid=-1 and full_name IS NULL;
UPDATE QRTZ_JOB_DETAILS SET JOB_CLASS_NAME='org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob' WHERE JOB_CLASS_NAME='org.apache.dolphinscheduler.server.quartz.ProcessScheduleJob';
UPDATE t_ds_process_instance instance SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE instance.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_task_instance instance SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE instance.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_schedules schedule SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE schedule.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_command command SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default');
UPDATE t_ds_error_command command SET worker_group=COALESCE((SELECT name from t_ds_worker_group WHERE command.worker_group=CONCAT(id,'')),'default'); |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,262 | [Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled | When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
e.g.:
![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png)
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
| https://github.com/apache/dolphinscheduler/issues/3262 | https://github.com/apache/dolphinscheduler/pull/3264 | 232a24441bbb559c18739a74abb69ecdb718e158 | 5584f0cb4d27e3ae64ec28c65a8669b38c75d188 | "2020-07-21T13:20:54Z" | java | "2020-08-10T07:18:01Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.apache.commons.io.IOUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH;
/**
* hadoop utils
* single instance
*/
public class HadoopUtils implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler");
public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS);
private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
.newBuilder()
.expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 2), TimeUnit.HOURS)
.build(new CacheLoader<String, HadoopUtils>() {
@Override
public HadoopUtils load(String key) throws Exception {
return new HadoopUtils();
}
});
private static volatile boolean yarnEnabled = false;
private Configuration configuration;
private FileSystem fs;
private HadoopUtils() {
init();
initHdfsPath();
}
public static HadoopUtils getInstance() {
return cache.getUnchecked(HADOOP_UTILS_KEY);
}
/**
* init dolphinscheduler root path in hdfs
*/
private void initHdfsPath() {
Path path = new Path(resourceUploadPath);
try {
if (!fs.exists(path)) {
fs.mkdirs(path);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* init hadoop configuration
*/
private void init() {
try {
configuration = new Configuration();
String resourceStorageType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType);
if (resUploadType == ResUploadType.HDFS) {
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
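// kerberos is enabled on startup: point the JVM at krb5.conf and log in from
// the configured keytab so subsequent HDFS/YARN requests are authenticated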
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
hdfsUser = "";
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
//first get the value from core-site.xml and hdfs-site.xml; if null, fall back to the properties file
//the default is the local file system
if (defaultFS.startsWith("file")) {
String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
if (StringUtils.isNotBlank(defaultFSProp)) {
Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
} else {
logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
throw new RuntimeException(
String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
);
}
} else {
logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
}
if (fs == null) {
if (StringUtils.isNotEmpty(hdfsUser)) {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
fs = FileSystem.get(configuration);
return true;
}
});
} else {
logger.warn("hdfs.root.user is not set value!");
fs = FileSystem.get(configuration);
}
}
} else if (resUploadType == ResUploadType.S3) {
System.setProperty(Constants.AWS_S3_V4, Constants.STRING_TRUE);
configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* @return Configuration
*/
public Configuration getConfiguration() {
return configuration;
}
/**
* get application url
*
* @param applicationId application id
* @return url of application
*/
public String getApplicationUrl(String applicationId) throws Exception {
/**
 * if rmHaIds is empty, a single resourcemanager is enabled;
 * if rmHaIds is not empty, resourcemanager HA is enabled
 */
String appUrl = "";
if (StringUtils.isEmpty(rmHaIds)){
//single resourcemanager enabled
appUrl = appAddress;
yarnEnabled = true;
} else {
//resourcemanager HA enabled
appUrl = getAppAddress(appAddress, rmHaIds);
yarnEnabled = true;
logger.info("application url : {}", appUrl);
}
if(StringUtils.isBlank(appUrl)){
throw new Exception("application url is blank");
}
return String.format(appUrl, applicationId);
}
public String getJobHistoryUrl(String applicationId) {
//eg:application_1587475402360_712719 -> job_1587475402360_712719
String jobId = applicationId.replace("application", "job");
return String.format(jobHistoryAddress, jobId);
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @return byte[] byte array
* @throws IOException errors
*/
public byte[] catFile(String hdfsFilePath) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return new byte[0];
}
FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath));
return IOUtils.toByteArray(fsDataInputStream);
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @param skipLineNums skip line numbers
* @param limit read how many lines
* @return content of file
* @throws IOException errors
*/
public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return Collections.emptyList();
}
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
BufferedReader br = new BufferedReader(new InputStreamReader(in));
Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
return stream.collect(Collectors.toList());
}
}
/**
* make the given file and all non-existent parents into
* directories. Has the semantics of Unix 'mkdir -p'.
* Existence of the directory hierarchy is not an error.
*
* @param hdfsPath path to create
* @return mkdir result
* @throws IOException errors
*/
public boolean mkdir(String hdfsPath) throws IOException {
return fs.mkdirs(new Path(hdfsPath));
}
/**
* copy files between FileSystems
*
* @param srcPath source hdfs path
* @param dstPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException {
return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf());
}
/**
* the src file is on the local disk. Add it to FS at
* the given dst name.
*
* @param srcFile local file
* @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcFile);
Path dstPath = new Path(dstHdfsPath);
fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);
return true;
}
/**
* copy hdfs file to local
*
* @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file
* @param deleteSource delete source
* @param overwrite overwrite
* @return result of copy hdfs file to local
* @throws IOException errors
*/
public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcHdfsFilePath);
File dstPath = new File(dstFile);
if (dstPath.exists()) {
if (dstPath.isFile()) {
if (overwrite) {
Files.delete(dstPath.toPath());
}
} else {
logger.error("destination file must be a file");
}
}
if (!dstPath.getParentFile().exists()) {
dstPath.getParentFile().mkdirs();
}
return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf());
}
/**
* delete a file
*
* @param hdfsFilePath the path to delete.
* @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false.
* @throws IOException errors
*/
public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
return fs.delete(new Path(hdfsFilePath), recursive);
}
/**
* check if exists
*
* @param hdfsFilePath source file path
* @return result of exists or not
* @throws IOException errors
*/
public boolean exists(String hdfsFilePath) throws IOException {
return fs.exists(new Path(hdfsFilePath));
}
/**
* Gets a list of files in the directory
*
* @param filePath file path
* @return {@link FileStatus} file status
* @throws Exception errors
*/
public FileStatus[] listFileStatus(String filePath) throws Exception {
try {
return fs.listStatus(new Path(filePath));
} catch (IOException e) {
logger.error("Get file list exception", e);
throw new Exception("Get file list exception", e);
}
}
/**
* Renames Path src to Path dst. Can take place on local fs
* or remote DFS.
*
* @param src path to be renamed
* @param dst new path after rename
* @return true if rename is successful
* @throws IOException on failure
*/
public boolean rename(String src, String dst) throws IOException {
return fs.rename(new Path(src), new Path(dst));
}
/**
* hadoop resourcemanager enabled or not
*
* @return result
*/
public boolean isYarnEnabled() {
return yarnEnabled;
}
/**
* get the state of an application
*
* @param applicationId application id
* @return the application status; the result may be null and parse exceptions are possible
*/
public ExecutionStatus getApplicationStatus(String applicationId) throws Exception {
if (StringUtils.isEmpty(applicationId)) {
return null;
}
String result = Constants.FAILED;
String applicationUrl = getApplicationUrl(applicationId);
logger.info("applicationUrl={}", applicationUrl);
String responseContent = HttpUtils.get(applicationUrl);
if (responseContent != null) {
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
result = jsonObject.path("app").path("finalStatus").asText();
} else {
//may be in job history
String jobHistoryUrl = getJobHistoryUrl(applicationId);
logger.info("jobHistoryUrl={}", jobHistoryUrl);
responseContent = HttpUtils.get(jobHistoryUrl);
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
if (!jsonObject.has("job")){
return ExecutionStatus.FAILURE;
}
result = jsonObject.path("job").path("state").asText();
}
switch (result) {
case Constants.ACCEPTED:
return ExecutionStatus.SUBMITTED_SUCCESS;
case Constants.SUCCEEDED:
return ExecutionStatus.SUCCESS;
case Constants.NEW:
case Constants.NEW_SAVING:
case Constants.SUBMITTED:
case Constants.FAILED:
return ExecutionStatus.FAILURE;
case Constants.KILLED:
return ExecutionStatus.KILL;
case Constants.RUNNING:
default:
return ExecutionStatus.RUNNING_EXECUTION;
}
}
/**
* get data hdfs path
*
* @return data hdfs path
*/
public static String getHdfsDataBasePath() {
if ("/".equals(resourceUploadPath)) {
// if basepath is configured to /, the generated url may be //default/resources (with extra leading /)
return "";
} else {
return resourceUploadPath;
}
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @param resourceType resource type
* @return hdfs resource dir
*/
public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
String hdfsDir = "";
if (resourceType.equals(ResourceType.FILE)) {
hdfsDir = getHdfsResDir(tenantCode);
} else if (resourceType.equals(ResourceType.UDF)) {
hdfsDir = getHdfsUdfDir(tenantCode);
}
return hdfsDir;
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @return hdfs resource dir
*/
public static String getHdfsResDir(String tenantCode) {
return String.format("%s/resources", getHdfsTenantDir(tenantCode));
}
/**
* hdfs user dir
*
* @param tenantCode tenant code
* @param userId user id
* @return hdfs resource dir
*/
public static String getHdfsUserDir(String tenantCode, int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
}
/**
* hdfs udf dir
*
* @param tenantCode tenant code
* @return get udf dir on hdfs
*/
public static String getHdfsUdfDir(String tenantCode) {
return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
}
/**
* get hdfs file name
*
* @param resourceType resource type
* @param tenantCode tenant code
* @param fileName file name
* @return hdfs file name
*/
public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName);
}
/**
* get absolute path and name for resource file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for file on hdfs
*/
public static String getHdfsResourceFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
}
/**
* get absolute path and name for udf file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for udf file on hdfs
*/
public static String getHdfsUdfFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
}
/**
* @param tenantCode tenant code
* @return file directory of tenants on hdfs
*/
public static String getHdfsTenantDir(String tenantCode) {
return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
}
/**
* getAppAddress
*
* @param appAddress app address
* @param rmHa resource manager ha
* @return app address
*/
public static String getAppAddress(String appAddress, String rmHa) {
//get active ResourceManager
String activeRM = YarnHAAdminUtils.getAcitveRMName(rmHa);
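// appAddress is a template like http://host:port/ws/v1/cluster/apps/%s; keep the
// scheme and port, but substitute the hostname of the active resourcemanager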
String[] split1 = appAddress.split(Constants.DOUBLE_SLASH);
if (split1.length != 2) {
return null;
}
String start = split1[0] + Constants.DOUBLE_SLASH;
String[] split2 = split1[1].split(Constants.COLON);
if (split2.length != 2) {
return null;
}
String end = Constants.COLON + split2[1];
return start + activeRM + end;
}
@Override
public void close() throws IOException {
if (fs != null) {
try {
fs.close();
} catch (IOException e) {
logger.error("Close HadoopUtils instance failed", e);
throw new IOException("Close HadoopUtils instance failed", e);
}
}
}
/**
* yarn ha admin utils
*/
private static final class YarnHAAdminUtils extends RMAdminCLI {
/**
 * get the id of the active resourcemanager
 *
 * @param rmIds comma-separated resourcemanager ids
 * @return the id of the active resourcemanager, or null if none is active
 */
public static String getAcitveRMName(String rmIds) {
String[] rmIdArr = rmIds.split(Constants.COMMA);
int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088);
String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info";
String state = null;
try {
/**
* send http get request to rm1
*/
state = getRMState(String.format(yarnUrl, rmIdArr[0]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[0];
} else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) {
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[1];
}
} else {
return null;
}
} catch (Exception e) {
// rm1 was unreachable; query rm2 and return it if it is the active one
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[1];
}
}
return null;
}
/**
 * get ResourceManager state
 *
 * @param url the cluster info url of one resourcemanager
 * @return the haState value ("ACTIVE" or "STANDBY"), or null if the request fails
 */
public static String getRMState(String url) {
String retStr = HttpUtils.get(url);
if (StringUtils.isEmpty(retStr)) {
return null;
}
//to json
ObjectNode jsonObject = JSONUtils.parseObject(retStr);
//get ResourceManager state
if (!jsonObject.has("clusterInfo")){
return null;
}
return jsonObject.get("clusterInfo").path("haState").asText();
}
}
}
|
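To see why issue 3262 manifests as a missing status, consider how `getApplicationStatus` above drives `HttpUtils.get`. A minimal sketch, using the `yarn.application.status.address` template and the example applicationId from this record (the wrapper class is hypothetical):

```java
public class StatusUrlSketch {
    public static void main(String[] args) {
        // template from the issue body; %s is replaced with the applicationId
        String pattern = "http://ark1:8088/ws/v1/cluster/apps/%s";
        String url = String.format(pattern, "application_1587475402360_712719");

        // HttpUtils.get returns null for any non-200 response; with Kerberos enabled
        // the resourcemanager answers 401 Unauthorized, so the lookup falls through to
        // the job history server and the task is ultimately reported as FAILURE
        String json = org.apache.dolphinscheduler.common.utils.HttpUtils.get(url);
        System.out.println(json);
    }
}
```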
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,262 | [Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled | When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
e.g.:
![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png)
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
| https://github.com/apache/dolphinscheduler/issues/3262 | https://github.com/apache/dolphinscheduler/pull/3264 | 232a24441bbb559c18739a74abb69ecdb718e158 | 5584f0cb4d27e3ae64ec28c65a8669b38c75d188 | "2020-07-21T13:20:54Z" | java | "2020-08-10T07:18:01Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HttpUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.http.HttpEntity;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.config.CookieSpecs;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.Arrays;
/**
* http utils
*/
public class HttpUtils {
public static final Logger logger = LoggerFactory.getLogger(HttpUtils.class);
private HttpUtils() {
}
public static CloseableHttpClient getInstance(){
return HttpClientInstance.httpClient;
}
private static class HttpClientInstance{
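// initialization-on-demand holder idiom: the shared pooled client is created
// lazily and thread-safely on the first call to getInstance()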
private static final CloseableHttpClient httpClient = HttpClients.custom().setConnectionManager(cm).setDefaultRequestConfig(requestConfig).build();
}
private static PoolingHttpClientConnectionManager cm;
private static SSLContext ctx = null;
private static SSLConnectionSocketFactory socketFactory;
private static RequestConfig requestConfig;
private static Registry<ConnectionSocketFactory> socketFactoryRegistry;
private static X509TrustManager xtm = new X509TrustManager() {
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) {
}
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
// the contract requires a non-null result; an empty array accepts no specific issuers
return new X509Certificate[0];
}
};
static {
try {
ctx = SSLContext.getInstance(SSLConnectionSocketFactory.TLS);
ctx.init(null, new TrustManager[] { xtm }, null);
} catch (NoSuchAlgorithmException e) {
logger.error("SSLContext init with NoSuchAlgorithmException", e);
} catch (KeyManagementException e) {
logger.error("SSLContext init with KeyManagementException", e);
}
socketFactory = new SSLConnectionSocketFactory(ctx, NoopHostnameVerifier.INSTANCE);
/** set connect timeout, connection request timeout and socket timeout */
requestConfig = RequestConfig.custom().setCookieSpec(CookieSpecs.IGNORE_COOKIES)
.setExpectContinueEnabled(Boolean.TRUE)
.setTargetPreferredAuthSchemes(Arrays.asList(AuthSchemes.NTLM, AuthSchemes.DIGEST))
.setProxyPreferredAuthSchemes(Arrays.asList(AuthSchemes.BASIC))
.setConnectTimeout(Constants.HTTP_CONNECT_TIMEOUT).setSocketTimeout(Constants.SOCKET_TIMEOUT)
.setConnectionRequestTimeout(Constants.HTTP_CONNECTION_REQUEST_TIMEOUT).setRedirectsEnabled(true)
.build();
socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory>create()
.register("http", PlainConnectionSocketFactory.INSTANCE).register("https", socketFactory).build();
cm = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
cm.setDefaultMaxPerRoute(60);
cm.setMaxTotal(100);
}
/**
* get http request content
* @param url url
* @return http get request response content
*/
public static String get(String url){
CloseableHttpClient httpclient = HttpUtils.getInstance();
HttpGet httpget = new HttpGet(url);
String responseContent = null;
CloseableHttpResponse response = null;
try {
response = httpclient.execute(httpget);
//check response status is 200
if (response.getStatusLine().getStatusCode() == 200) {
HttpEntity entity = response.getEntity();
if (entity != null) {
responseContent = EntityUtils.toString(entity, Constants.UTF_8);
}else{
logger.warn("http entity is null");
}
}else{
logger.error("http get:{} response status code is not 200!", response.getStatusLine().getStatusCode());
}
}catch (Exception e){
logger.error(e.getMessage(),e);
}finally {
try {
if (response != null) {
EntityUtils.consume(response.getEntity());
response.close();
}
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
if (!httpget.isAborted()) {
httpget.releaseConnection();
httpget.abort();
}
}
return responseContent;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,262 | [Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled | When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
e.g.:
![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png)
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
| https://github.com/apache/dolphinscheduler/issues/3262 | https://github.com/apache/dolphinscheduler/pull/3264 | 232a24441bbb559c18739a74abb69ecdb718e158 | 5584f0cb4d27e3ae64ec28c65a8669b38c75d188 | "2020-07-21T13:20:54Z" | java | "2020-08-10T07:18:01Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClient.java | |
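The before-fix snapshot of KerberosHttpClient.java above is empty because the class was only introduced by the fix (PR 3264). As a hedged sketch of what a SPNEGO-authenticated GET generally looks like with the same HttpClient 4.x stack that HttpUtils already uses — the class name and the JAAS configuration entry name "KerberosLogin" are assumptions, not the project's actual implementation:

```java
import java.security.Principal;
import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class SpnegoGetSketch {

    public static String get(String url) throws Exception {
        // answer the resourcemanager's "401 WWW-Authenticate: Negotiate" challenge with SPNEGO
        Lookup<AuthSchemeProvider> authSchemes = RegistryBuilder.<AuthSchemeProvider>create()
                .register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true))
                .build();
        // the real ticket comes from the JAAS login below; these credentials are a no-op placeholder
        BasicCredentialsProvider credentials = new BasicCredentialsProvider();
        credentials.setCredentials(AuthScope.ANY, new Credentials() {
            @Override
            public Principal getUserPrincipal() { return null; }
            @Override
            public String getPassword() { return null; }
        });
        try (CloseableHttpClient client = HttpClients.custom()
                .setDefaultAuthSchemeRegistry(authSchemes)
                .setDefaultCredentialsProvider(credentials)
                .build()) {
            // "KerberosLogin" is an assumed JAAS configuration entry that logs in from a keytab
            LoginContext login = new LoginContext("KerberosLogin");
            login.login();
            Subject subject = login.getSubject();
            // run the request as the authenticated subject so GSS-API can find the ticket
            return Subject.doAs(subject, (PrivilegedAction<String>) () -> {
                try {
                    return EntityUtils.toString(client.execute(new HttpGet(url)).getEntity());
                } catch (Exception e) {
                    throw new IllegalStateException(e);
                }
            });
        }
    }
}
```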
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,262 | [Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled | When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
e.g.:
![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png)
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
| https://github.com/apache/dolphinscheduler/issues/3262 | https://github.com/apache/dolphinscheduler/pull/3264 | 232a24441bbb559c18739a74abb69ecdb718e158 | 5584f0cb4d27e3ae64ec28c65a8669b38c75d188 | "2020-07-21T13:20:54Z" | java | "2020-08-10T07:18:01Z" | dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/HttpUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.http.impl.client.CloseableHttpClient;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* HttpClient utils test
*/
public class HttpUtilsTest {
public static final Logger logger = LoggerFactory.getLogger(HttpUtilsTest.class);
@Test
public void testGetTest(){
//success
String result = HttpUtils.get("https://github.com/manifest.json");
Assert.assertNotNull(result);
ObjectNode jsonObject = JSONUtils.parseObject(result);
Assert.assertEquals("GitHub", jsonObject.path("name").asText());
result = HttpUtils.get("https://123.333.111.33/ccc");
Assert.assertNull(result);
}
@Test
public void testGetHttpClient() {
CloseableHttpClient httpClient1 = HttpUtils.getInstance();
CloseableHttpClient httpClient2 = HttpUtils.getInstance();
Assert.assertEquals(httpClient1, httpClient2);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,262 | [Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled | When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
e.g.:
![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png)
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
| https://github.com/apache/dolphinscheduler/issues/3262 | https://github.com/apache/dolphinscheduler/pull/3264 | 232a24441bbb559c18739a74abb69ecdb718e158 | 5584f0cb4d27e3ae64ec28c65a8669b38c75d188 | "2020-07-21T13:20:54Z" | java | "2020-08-10T07:18:01Z" | dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/utils/KerberosHttpClientTest.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,262 | [Bug][dolphinscheduler-common] When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled | When you request the URL through applicationID to get the application status, you cannot get it if Kerberos authentication is enabled
eg:
![image](https://user-images.githubusercontent.com/59079269/88059613-b0818580-cb97-11ea-892b-3c3f94c32652.png)
yarn.application.status.address=http://ark1:8088/ws/v1/cluster/apps/%s
| https://github.com/apache/dolphinscheduler/issues/3262 | https://github.com/apache/dolphinscheduler/pull/3264 | 232a24441bbb559c18739a74abb69ecdb718e158 | 5584f0cb4d27e3ae64ec28c65a8669b38c75d188 | "2020-07-21T13:20:54Z" | java | "2020-08-10T07:18:01Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing, making the scheduling system out of the box for data
processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.5.RELEASE</spring.version>
<spring.boot.version>2.1.3.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.2.3</quartz.version>
<jackson.version>2.9.8</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.14</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.6</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>20.0</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<jsp-2.1.version>6.1.14</jsp-2.1.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<apache.rat.version>0.13</apache.rat.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
<rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<guava-retry.version>2.0.0</guava-retry.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-plugin-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>com.sun.jersey</artifactId>
<groupId>jersey-json</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<!-- for api module -->
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>${jsp-2.1.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<version>${rpm-maven-plugion.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<includes>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/template/AlertTemplateFactoryTest.java</include>
<include>**/alert/template/impl/DefaultHTMLTemplateTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/ExcelUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/utils/JSONUtilsTest.java</include>
<!--<include>**/alert/utils/MailUtilsTest.java</include>-->
<include>**/alert/plugin/EmailAlertPluginTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
<include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/PasswordAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<!--<include>**/api/service/DataSourceServiceTest.java</include>-->
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/service/WorkFlowLineageServiceTest.java</include>
<include>**/api/controller/WorkFlowLineageControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/process/ProcessBuilderForWin32Test.java</include>
<include>**/common/utils/process/ProcessEnvironmentForWin32Test.java</include>
<include>**/common/utils/process/ProcessImplForWin32Test.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest.java</include>
<include>**/common/plugin/PluginClassLoaderTest.java</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/dao/entity/UdfFuncTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<!--<include>**/remote/NettyRemotingClientTest.java</include>-->
<include>**/remote/ResponseFutureTest.java</include>
<include>**/server/log/LoggerServerTest.java</include>
<include>**/server/entity/SQLTaskExecutionContextTest.java</include>
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<!--<include>**/server/master/register/MasterRegistryTest.java</include>-->
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<!--<include>**/server/master/ConditionsTaskTest.java</include>-->
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<include>**/server/register/ZookeeperNodeManagerTest.java</include>
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<!--<include>**/server/utils/FlinkArgsUtilsTest.java</include>-->
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>
<include>**/server/worker/registry/WorkerRegistryTest.java</include>
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/zk/CuratorZookeeperClientTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserAlertGroupMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/datasource/HiveDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/model/AlertDataTest.java</include>
<include>**/plugin/model/AlertInfoTest.java</include>
<include>**/plugin/utils/PropertyUtilsTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache.rat.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<addDefaultLicenseMatchers>false</addDefaultLicenseMatchers>
<licenses>
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL20</licenseFamilyCategory>
<licenseFamilyName>Apache License, 2.0</licenseFamilyName>
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF)</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License, 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>**/node_modules/**</exclude>
<exclude>**/node/**</exclude>
<exclude>**/dist/**</exclude>
<exclude>**/licenses/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/sql/soft_version</exclude>
<exclude>**/common/utils/ScriptRunner.java</exclude>
<exclude>**/*.json</exclude>
<!-- document files -->
<exclude>**/*.md</exclude>
<exclude>**/*.MD</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/docs/**</exclude>
<exclude>**/*.babelrc</exclude>
<exclude>**/*.eslintrc</exclude>
<exclude>**/.mvn/jvm.config</exclude>
<exclude>**/.mvn/wrapper/**</exclude>
</excludes>
<consoleOutput>true</consoleOutput>
</configuration>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-plugin-api</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,058 | [BUG] 1.3.0 branch The task running order in the process instance does not follow the topological order in the process definition | 1. process definition
![1593309494(1)](https://user-images.githubusercontent.com/9334358/85935770-d9ab4f00-b926-11ea-9f69-52a9e11ad699.jpg)
2. task instance order in database
![72423ea1aafbee8b832e4798046dff7](https://user-images.githubusercontent.com/9334358/85935784-fa73a480-b926-11ea-93a2-aa89f2718f29.png)
3. task instance order in web ui
![image](https://user-images.githubusercontent.com/9334358/85935791-0b241a80-b927-11ea-9288-a61e66d0486b.png)
4. task instance order in process instance
![image](https://user-images.githubusercontent.com/9334358/85935795-1aa36380-b927-11ea-9532-99462048e24c.png)
The task running order in the process instance does not follow the topological order in the process definition. Why?
| https://github.com/apache/dolphinscheduler/issues/3058 | https://github.com/apache/dolphinscheduler/pull/3464 | 46f058698170fff573bbd02af74762ded639769c | e2553e33d78f438f8519f6e3dce1f555b002db65 | "2020-06-28T02:07:54Z" | java | "2020-08-11T10:51:12Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/jsPlumbHandle.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import 'jquery-ui/ui/widgets/draggable'
import 'jquery-ui/ui/widgets/droppable'
import 'jquery-ui/ui/widgets/resizable'
import Vue from 'vue'
import _ from 'lodash'
import i18n from '@/module/i18n'
import { jsPlumb } from 'jsplumb'
import DragZoom from './dragZoom'
import store from '@/conf/home/store'
import router from '@/conf/home/router'
import { uuid, findComponentDownward } from '@/module/util/'
import {
tasksAll,
rtTasksTpl,
setSvgColor,
saveTargetarr,
rtTargetarrArr,
computeScale
} from './util'
import mStart from '@/conf/home/pages/projects/pages/definition/pages/list/_source/start'
let JSP = function () {
this.dag = {}
this.selectedElement = {}
this.config = {
// Whether to drag
isDrag: true,
// Whether to allow connection
isAttachment: false,
// Whether to drag a new node
isNewNodes: true,
// Whether to support double-click node events
isDblclick: true,
// Whether to support right-click menu events
isContextmenu: true,
// Whether to allow click events
isClick: false
}
}
/**
* dag init
*/
JSP.prototype.init = function ({ dag, instance, options }) {
// Get the dag component instance
this.dag = dag
// Get jsplumb instance
this.JspInstance = instance
// Get JSP options
this.options = options || {}
// Register jsplumb connection type and configuration
this.JspInstance.registerConnectionType('basic', {
anchor: 'Continuous',
connector: 'Bezier' // Line type
})
// Initial configuration
this.setConfig({
isDrag: !store.state.dag.isDetails,
isAttachment: false,
isNewNodes: !store.state.dag.isDetails, // Permissions.getAuth() === false ? false : !store.state.dag.isDetails,
isDblclick: true,
isContextmenu: true,
isClick: false
})
// Monitor line click
this.JspInstance.bind('click', e => {
if (this.config.isClick) {
this.connectClick(e)
}
})
// Drag and drop
if (this.config.isNewNodes) {
DragZoom.init()
}
}
/**
* set config attribute
*/
JSP.prototype.setConfig = function (o) {
this.config = Object.assign(this.config, {}, o)
}
/**
* Node binding event
*/
JSP.prototype.tasksEvent = function (selfId) {
const tasks = $(`#${selfId}`)
// Bind right event
tasks.on('contextmenu', e => {
this.tasksContextmenu(e)
return false
})
// Binding double click event
tasks.find('.icos').bind('dblclick', e => {
this.tasksDblclick(e)
})
// Binding click event
tasks.on('click', e => {
this.tasksClick(e)
})
}
/**
* Dag node drag and drop processing
*/
JSP.prototype.draggable = function () {
if (this.config.isNewNodes) {
let selfId
const self = this
$('.toolbar-btn .roundedRect').draggable({
scope: 'plant',
helper: 'clone',
containment: $('.dag-model'),
stop: function (e, ui) {
},
drag: function () {
$('body').find('.tooltip.fade.top.in').remove()
}
})
$('#canvas').droppable({
scope: 'plant',
drop: function (ev, ui) {
let id = 'tasks-' + Math.ceil(Math.random() * 100000) // eslint-disable-line
let scale = computeScale($(this))
scale = scale || 1
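// computeScale returns false when the canvas has no CSS transform, so fall back to a 1:1 scale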
// Get mouse coordinates and after scale coordinate
const left = parseInt(ui.offset.left - $(this).offset().left) / scale
const top = parseInt(ui.offset.top - $(this).offset().top) / scale
// Generate template node
$('#canvas').append(rtTasksTpl({
id: id,
name: id,
x: left,
y: top,
isAttachment: self.config.isAttachment,
taskType: findComponentDownward(self.dag.$root, 'dag-chart').dagBarId
}))
// Get the generated node
const thisDom = jsPlumb.getSelector('.statemachine-demo .w')
// Generating a connection node
self.JspInstance.batch(() => {
self.initNode(thisDom[thisDom.length - 1])
})
selfId = id
self.tasksEvent(selfId)
// only open the node form if the DOM structure was actually generated
if ($(`#${selfId}`).html()) {
// dag event
findComponentDownward(self.dag.$root, 'dag-chart')._createNodes({
id: selfId
})
}
}
})
}
}
/**
* Echo json processing and old data structure processing
*/
JSP.prototype.jsonHandle = function ({ largeJson, locations }) {
_.map(largeJson, v => {
// Generate template
$('#canvas').append(rtTasksTpl({
id: v.id,
name: v.name,
x: locations[v.id].x,
y: locations[v.id].y,
targetarr: locations[v.id].targetarr,
isAttachment: this.config.isAttachment,
taskType: v.type,
runFlag: v.runFlag,
nodenumber: locations[v.id].nodenumber,
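// CONDITIONS nodes persist their success/failure branch targets in conditionResult; other task types leave these blank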
successNode: v.conditionResult === undefined ? '' : v.conditionResult.successNode[0],
failedNode: v.conditionResult === undefined ? '' : v.conditionResult.failedNode[0]
}))
// contextmenu event
$(`#${v.id}`).on('contextmenu', e => {
this.tasksContextmenu(e)
return false
})
// dblclick event
$(`#${v.id}`).find('.icos').bind('dblclick', e => {
this.tasksDblclick(e)
})
// click event
$(`#${v.id}`).bind('click', e => {
this.tasksClick(e)
})
})
}
/**
* Initialize a single node
*/
JSP.prototype.initNode = function (el) {
// Whether to drag
if (this.config.isDrag) {
this.JspInstance.draggable(el, {
containment: 'dag-container'
})
}
// Node attribute configuration
this.JspInstance.makeSource(el, {
filter: '.ep',
anchor: 'Continuous',
connectorStyle: {
stroke: '#2d8cf0',
strokeWidth: 2,
outlineStroke: 'transparent',
outlineWidth: 4
},
// connectionType is intentionally left unset here; enabling it caused a leak
// connectionType: "basic",
extract: {
action: 'the-action'
},
maxConnections: -1
})
// Node connection property configuration
this.JspInstance.makeTarget(el, {
dropOptions: { hoverClass: 'dragHover' },
anchor: 'Continuous',
allowLoopback: false // forbid a node from connecting to itself
})
this.JspInstance.fire('jsPlumbDemoNodeAdded', el)
}
/**
* Node right click menu
*/
JSP.prototype.tasksContextmenu = function (event) {
if (this.config.isContextmenu) {
const routerName = router.history.current.name
// state
const isOne = routerName === 'projects-definition-details' && this.dag.releaseState !== 'NOT_RELEASE'
// hide
const isTwo = store.state.dag.isDetails
const html = [
`<a href="javascript:" id="startRunning" class="${isOne ? '' : 'disbled'}"><em class="ans-icon-play"></em><span>${i18n.$t('Start')}</span></a>`,
`<a href="javascript:" id="editNodes" class="${isTwo ? 'disbled' : ''}"><em class="ans-icon-edit"></em><span>${i18n.$t('Edit')}</span></a>`,
`<a href="javascript:" id="copyNodes" class="${isTwo ? 'disbled' : ''}"><em class="ans-icon-copy"></em><span>${i18n.$t('Copy')}</span></a>`,
`<a href="javascript:" id="removeNodes" class="${isTwo ? 'disbled' : ''}"><em class="ans-icon-trash"></em><span>${i18n.$t('Delete')}</span></a>`
]
const operationHtml = () => {
return html.join('')
}
const e = event
const $id = e.currentTarget.id
const $contextmenu = $('#contextmenu')
const $name = $(`#${$id}`).find('.name-p').text()
const $left = e.pageX + document.body.scrollLeft - 5
const $top = e.pageY + document.body.scrollTop - 5
$contextmenu.css({
left: $left,
top: $top,
visibility: 'visible'
})
// Action bar
$contextmenu.html('').append(operationHtml)
if (isOne) {
// start run
$('#startRunning').on('click', () => {
const name = store.state.dag.name
const id = router.history.current.params.id
store.dispatch('dag/getStartCheck', { processDefinitionId: id }).then(res => {
const modal = Vue.$modal.dialog({
closable: false,
showMask: true,
escClose: true,
className: 'v-modal-custom',
transitionName: 'opacityp',
render (h) {
return h(mStart, {
on: {
onUpdate () {
modal.remove()
},
close () {
modal.remove()
}
},
props: {
item: {
id: id,
name: name
},
startNodeList: $name,
sourceType: 'contextmenu'
}
})
}
})
}).catch(e => {
Vue.$message.error(e.msg || '')
})
})
}
if (!isTwo) {
// edit node
$('#editNodes').click(ev => {
findComponentDownward(this.dag.$root, 'dag-chart')._createNodes({
id: $id,
type: $(`#${$id}`).attr('data-tasks-type')
})
})
// delete node
$('#removeNodes').click(ev => {
this.removeNodes($id)
})
// copy node
$('#copyNodes').click(res => {
this.copyNodes($id)
})
}
}
}
/**
* Node double click event
*/
JSP.prototype.tasksDblclick = function (e) {
// Untie event
if (this.config.isDblclick) {
const id = $(e.currentTarget.offsetParent).attr('id')
findComponentDownward(this.dag.$root, 'dag-chart')._createNodes({
id: id,
type: $(`#${id}`).attr('data-tasks-type')
})
}
}
/**
* Node click event
*/
JSP.prototype.tasksClick = function (e) {
let $id
const self = this
const $body = $('body')
if (this.config.isClick) {
const $connect = this.selectedElement.connect
$('.w').removeClass('jtk-tasks-active')
$(e.currentTarget).addClass('jtk-tasks-active')
if ($connect) {
setSvgColor($connect, '#2d8cf0')
this.selectedElement.connect = null
}
this.selectedElement.id = $(e.currentTarget).attr('id')
// Unbind copy and paste events
$body.unbind('copy').unbind('paste')
// Copy binding id
$id = self.selectedElement.id
$body.bind({
copy: function () {
$id = self.selectedElement.id
},
paste: function () {
$id && self.copyNodes($id)
}
})
}
}
/**
* Remove binding events
* paste
*/
JSP.prototype.removePaste = function () {
const $body = $('body')
// Unbind copy and paste events
$body.unbind('copy').unbind('paste')
// Remove selected node parameters
this.selectedElement.id = null
// Remove node selection effect
$('.w').removeClass('jtk-tasks-active')
}
/**
* Line click event
*/
JSP.prototype.connectClick = function (e) {
// Set svg color
setSvgColor(e, '#0097e0')
const $id = this.selectedElement.id
if ($id) {
$(`#${$id}`).removeClass('jtk-tasks-active')
this.selectedElement.id = null
}
this.selectedElement.connect = e
}
/**
* toolbarEvent
* @param {Pointer}
*/
JSP.prototype.handleEventPointer = function (is) {
this.setConfig({
isClick: is,
isAttachment: false
})
}
/**
* toolbarEvent
* @param {Line}
*/
JSP.prototype.handleEventLine = function (is) {
const wDom = $('.w')
this.setConfig({
isAttachment: is
})
is ? wDom.addClass('jtk-ep') : wDom.removeClass('jtk-ep')
}
/**
* toolbarEvent
* @param {Remove}
*/
JSP.prototype.handleEventRemove = function () {
const $id = this.selectedElement.id || null
const $connect = this.selectedElement.connect || null
if ($id) {
this.removeNodes(this.selectedElement.id)
} else {
this.removeConnect($connect)
}
// Monitor whether to edit DAG
store.commit('dag/setIsEditDag', true)
}
/**
* Delete node
*/
JSP.prototype.removeNodes = function ($id) {
// Delete node processing(data-targetarr)
_.map(tasksAll(), v => {
let targetarr = v.targetarr.split(',')
if (targetarr.length) {
let newArr = _.filter(targetarr, v1 => v1 !== $id)
$(`#${v.id}`).attr('data-targetarr', newArr.toString())
}
})
// delete node
this.JspInstance.remove($id)
// delete dom
$(`#${$id}`).remove()
// callback onRemoveNodes event
this.options && this.options.onRemoveNodes && this.options.onRemoveNodes($id)
let connects = []
_.map(this.JspInstance.getConnections(), v => {
connects.push({
endPointSourceId: v.sourceId,
endPointTargetId: v.targetId
})
})
// Storage line dependence
store.commit('dag/setConnects', connects)
}
/**
* Delete connection
*/
JSP.prototype.removeConnect = function ($connect) {
if (!$connect) {
return
}
// Remove connections and remove node and node dependencies
let targetId = $connect.targetId
let sourceId = $connect.sourceId
let targetarr = rtTargetarrArr(targetId)
if (targetarr.length) {
targetarr = _.filter(targetarr, v => v !== sourceId)
$(`#${targetId}`).attr('data-targetarr', targetarr.toString())
}
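// CONDITIONS nodes track their outgoing branch count in data-nodenumber, so decrement it when a branch line is removed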
if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS') {
$(`#${sourceId}`).attr('data-nodenumber', Number($(`#${sourceId}`).attr('data-nodenumber')) - 1)
}
this.JspInstance.deleteConnection($connect)
this.selectedElement = {}
}
/**
* Copy node
*/
JSP.prototype.copyNodes = function ($id) {
let newNodeInfo = _.cloneDeep(_.find(store.state.dag.tasks, v => v.id === $id))
const newNodePors = store.state.dag.locations[$id]
// Unstored nodes do not allow replication
if (!newNodePors) {
return
}
// Generate random id
const newUuId = `${uuid() + uuid()}`
const id = newNodeInfo.id.length > 8 ? newNodeInfo.id.substr(0, 7) : newNodeInfo.id
const name = newNodeInfo.name.length > 8 ? newNodeInfo.name.substr(0, 7) : newNodeInfo.name
// new id
const newId = `${id || ''}-${newUuId}`
// new name
const newName = `${name || ''}-${newUuId}`
// coordinate x
const newX = newNodePors.x + 100
// coordinate y
const newY = newNodePors.y + 40
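// the +100/+40 offset keeps the pasted copy from overlapping the original node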
// Generate template node
$('#canvas').append(rtTasksTpl({
id: newId,
name: newName,
x: newX,
y: newY,
isAttachment: this.config.isAttachment,
taskType: newNodeInfo.type
}))
// Get the generated node
const thisDom = jsPlumb.getSelector('.statemachine-demo .w')
// Copy node information
newNodeInfo = Object.assign(newNodeInfo, {
id: newId,
name: newName
})
// Add new node
store.commit('dag/addTasks', newNodeInfo)
// Add node location information
store.commit('dag/addLocations', {
[newId]: {
name: newName,
targetarr: '',
nodenumber: 0,
x: newX,
y: newY
}
})
// Generating a connection node
this.JspInstance.batch(() => {
this.initNode(thisDom[thisDom.length - 1])
// Add events to nodes
this.tasksEvent(newId)
})
}
/**
* toolbarEvent
* @param {Screen}
*/
JSP.prototype.handleEventScreen = function ({ item, is }) {
let screenOpen = true
if (is) {
item.icon = 'ans-icon-min'
screenOpen = true
} else {
item.icon = 'ans-icon-max'
screenOpen = false
}
const $mainLayoutModel = $('.main-layout-model')
if (screenOpen) {
$mainLayoutModel.addClass('dag-screen')
} else {
$mainLayoutModel.removeClass('dag-screen')
}
}
/**
* save task
* @param tasks
* @param locations
* @param connects
*/
JSP.prototype.saveStore = function () {
return new Promise((resolve, reject) => {
const connects = []
const locations = {}
const tasks = []
const is = (id) => {
return !!_.filter(tasksAll(), v => v.id === id).length
}
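// keep a task only if its node still exists on the canvas DOM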
// task
_.map(_.cloneDeep(store.state.dag.tasks), v => {
if (is(v.id)) {
let preTasks = []
let id = $(`#${v.id}`)
let tar = id.attr('data-targetarr')
let idDep = tar ? id.attr('data-targetarr').split(',') : []
if (idDep.length) {
_.map(idDep, v1 => {
preTasks.push($(`#${v1}`).find('.name-p').text())
})
}
let tasksParam = _.assign(v, {
preTasks: preTasks
})
// Sub-workflow has no retries and interval
if (v.type === 'SUB_PROCESS') {
tasksParam = _.omit(tasksParam, ['maxRetryTimes', 'retryInterval'])
}
tasks.push(tasksParam)
}
})
_.map(this.JspInstance.getConnections(), v => {
connects.push({
endPointSourceId: v.sourceId,
endPointTargetId: v.targetId
})
})
_.map(tasksAll(), v => {
locations[v.id] = {
name: v.name,
targetarr: v.targetarr,
nodenumber: v.nodenumber,
x: v.x,
y: v.y
}
})
// Storage node
store.commit('dag/setTasks', tasks)
// Store coordinate information
store.commit('dag/setLocations', locations)
// Storage line dependence
store.commit('dag/setConnects', connects)
resolve({
connects: connects,
tasks: tasks,
locations: locations
})
})
}
/**
* Event processing
*/
JSP.prototype.handleEvent = function () {
this.JspInstance.bind('beforeDrop', function (info) {
let sourceId = info.sourceId // source node id
let targetId = info.targetId // target node id
/**
* Forbid the connection when both the source and target node ids are missing
*/
if (!sourceId && !targetId) {
Vue.$message.warning(`${i18n.$t('This canvas is abnormal and the node connection cannot be made. Please save or exit the current workflow')}`)
return false
}
/**
* Recursively walk the upstream (targetarr) chain to check whether targetId is already an ancestor, which would create a cycle
*/
let recursiveVal
const recursiveTargetarr = (arr, targetId) => {
for (let i in arr) {
if (arr[i] === targetId) {
recursiveVal = targetId
} else {
recursiveTargetarr(rtTargetarrArr(arr[i]), targetId)
}
}
return recursiveVal
}
// Connection to connected nodes is not allowed
if (_.findIndex(rtTargetarrArr(targetId), v => v === sourceId) !== -1) {
return false
}
// Recursive form to find if the target Targetarr has a sourceId
if (recursiveTargetarr(rtTargetarrArr(sourceId), targetId)) {
return false
}
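// a CONDITIONS node may have at most two outgoing branches (one success, one failure)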
if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS' && Number($(`#${sourceId}`).attr('data-nodenumber')) === 2) {
return false
} else {
$(`#${sourceId}`).attr('data-nodenumber', Number($(`#${sourceId}`).attr('data-nodenumber')) + 1)
}
// Storage node dependency information
saveTargetarr(sourceId, targetId)
// Monitor whether to edit DAG
store.commit('dag/setIsEditDag', true)
return true
})
}
/**
* Backfill data processing
*/
JSP.prototype.jspBackfill = function ({ connects, locations, largeJson }) {
// Backfill nodes
this.jsonHandle({
largeJson: largeJson,
locations: locations
})
const wNodes = jsPlumb.getSelector('.statemachine-demo .w')
// Backfill line
this.JspInstance.batch(() => {
for (let i = 0; i < wNodes.length; i++) {
this.initNode(wNodes[i])
}
_.map(connects, v => {
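// ids with four '-' segments carry an extra suffix; keep only the first three segments to recover the task id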
let sourceId = v.endPointSourceId.split('-')
let targetId = v.endPointTargetId.split('-')
if (sourceId.length === 4 && targetId.length === 4) {
sourceId = `${sourceId[0]}-${sourceId[1]}-${sourceId[2]}`
targetId = `${targetId[0]}-${targetId[1]}-${targetId[2]}`
} else {
sourceId = v.endPointSourceId
targetId = v.endPointTargetId
}
if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS' && $(`#${sourceId}`).attr('data-successnode') === $(`#${targetId}`).find('.name-p').text()) {
this.JspInstance.connect({
source: sourceId,
target: targetId,
type: 'basic',
paintStyle: { strokeWidth: 2, stroke: '#4caf50' },
hoverPaintStyle: { stroke: '#ccc', strokeWidth: 3 },
overlays: [['Label', { label: i18n.$t('success'), location: 0.5, id: 'label' }]]
})
} else if ($(`#${sourceId}`).attr('data-tasks-type') === 'CONDITIONS' && $(`#${sourceId}`).attr('data-failednode') === $(`#${targetId}`).find('.name-p').text()) {
this.JspInstance.connect({
source: sourceId,
target: targetId,
type: 'basic',
paintStyle: { strokeWidth: 2, stroke: '#252d39' },
hoverPaintStyle: { stroke: '#ccc', strokeWidth: 3 },
overlays: [['Label', { label: i18n.$t('failed'), location: 0.5, id: 'label' }]]
})
} else {
this.JspInstance.connect({
source: sourceId,
target: targetId,
type: 'basic',
paintStyle: { strokeWidth: 2, stroke: '#2d8cf0' },
hoverPaintStyle: { stroke: '#ccc', strokeWidth: 3 }
})
}
})
})
jsPlumb.fire('jsPlumbDemoLoaded', this.JspInstance)
// Connection monitoring
this.handleEvent()
// Drag and drop new nodes
this.draggable()
}
export default new JSP() |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,058 | [BUG] 1.3.0 branch The task running order in the process instance does not follow the topological order in the process definition | 1. process definition
![1593309494(1)](https://user-images.githubusercontent.com/9334358/85935770-d9ab4f00-b926-11ea-9f69-52a9e11ad699.jpg)
2. task instance order in database
![72423ea1aafbee8b832e4798046dff7](https://user-images.githubusercontent.com/9334358/85935784-fa73a480-b926-11ea-93a2-aa89f2718f29.png)
3. task instance order in web ui
![image](https://user-images.githubusercontent.com/9334358/85935791-0b241a80-b927-11ea-9288-a61e66d0486b.png)
4. task instance order in process instance
![image](https://user-images.githubusercontent.com/9334358/85935795-1aa36380-b927-11ea-9532-99462048e24c.png)
The task running order in the process instance does not follow the topological order in the process definition. Why?
| https://github.com/apache/dolphinscheduler/issues/3058 | https://github.com/apache/dolphinscheduler/pull/3464 | 46f058698170fff573bbd02af74762ded639769c | e2553e33d78f438f8519f6e3dce1f555b002db65 | "2020-06-28T02:07:54Z" | java | "2020-08-11T10:51:12Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/plugIn/util.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import _ from 'lodash'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
/**
* Node, to array
*/
const rtTargetarrArr = (id) => {
let ids = $(`#${id}`).attr('data-targetarr')
return ids ? ids.split(',') : []
}
/**
* Store node id to targetarr
*/
const saveTargetarr = (valId, domId) => {
let $target = $(`#${domId}`)
let targetStr = $target.attr('data-targetarr') ? $target.attr('data-targetarr') + `,${valId}` : `${valId}`
$target.attr('data-targetarr', targetStr)
}
const rtBantpl = () => {
return `<em class="ans-icon-forbidden" data-toggle="tooltip" data-html="true" data-container="body" data-placement="left" title="${i18n.$t('Prohibition execution')}"></em>`
}
/**
* return node html
*/
const rtTasksTpl = ({ id, name, x, y, targetarr, isAttachment, taskType, runFlag, nodenumber, successNode, failedNode }) => {
let tpl = ''
tpl += `<div class="w jtk-draggable jtk-droppable jtk-endpoint-anchor jtk-connected ${isAttachment ? 'jtk-ep' : ''}" data-targetarr="${targetarr || ''}" data-successNode="${successNode || ''}" data-failedNode="${failedNode || ''}" data-nodenumber="${nodenumber || 0}" data-tasks-type="${taskType}" id="${id}" style="left: ${x}px; top: ${y}px;">`
tpl += '<div>'
tpl += '<div class="state-p"></div>'
tpl += `<div class="icos icos-${taskType}"></div>`
tpl += `<span class="name-p">${name}</span>`
tpl += '</div>'
tpl += '<div class="ep"></div>'
tpl += '<div class="ban-p">'
if (runFlag === 'FORBIDDEN') {
tpl += rtBantpl()
}
tpl += '</div>'
tpl += '</div>'
return tpl
}
/**
* Get all tasks nodes
*/
const tasksAll = () => {
let a = []
$('#canvas .w').each(function (idx, elem) {
let e = $(elem)
a.push({
id: e.attr('id'),
name: e.find('.name-p').text(),
targetarr: e.attr('data-targetarr') || '',
nodenumber: e.attr('data-nodenumber'),
x: parseInt(e.css('left'), 10),
y: parseInt(e.css('top'), 10)
})
})
return a
}
/**
* Determine if name is in the current dag map
* rely dom / backfill
*/
const isNameExDag = (name, rely) => {
if (rely === 'dom') {
return _.findIndex(tasksAll(), v => v.name === name) !== -1
} else {
return _.findIndex(store.state.dag.tasks, v => v.name === name) !== -1
}
}
/**
* Change svg line color
*/
const setSvgColor = (e, color) => {
// Traverse clear all colors
$('.jtk-connector').each((i, o) => {
_.map($(o)[0].childNodes, v => {
$(v).attr('fill', '#2d8cf0').attr('stroke', '#2d8cf0').attr('stroke-width', 2)
})
})
// Add color to the selection
_.map($(e.canvas)[0].childNodes, (v, i) => {
$(v).attr('fill', color).attr('stroke', color)
if ($(v).attr('class')) {
$(v).attr('stroke-width', 2)
}
})
}
/**
* Get all node ids
*/
const allNodesId = () => {
let idArr = []
$('.w').each((i, o) => {
let $obj = $(o)
let $span = $obj.find('.name-p').text()
if ($span) {
idArr.push({
id: $obj.attr('id'),
name: $span
})
}
})
return idArr
}
/**
* compute scale,because it cant get from jquery directly
* @param el element
* @returns {boolean|number}
*/
const computeScale = function (el) {
const matrix = el.css('transform')
if (!matrix || matrix === 'none') {
return false
}
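// a 2D transform reads 'matrix(a, b, c, d, tx, ty)'; the horizontal scale factor is sqrt(a^2 + b^2)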
const values = matrix.split('(')[1].split(')')[0].split(',')
return Math.sqrt(values[0] * values[0] + values[1] * values[1])
}
export {
rtTargetarrArr,
saveTargetarr,
rtTasksTpl,
tasksAll,
isNameExDag,
setSvgColor,
allNodesId,
rtBantpl,
computeScale
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,466 | [Bug][Worker] ZooKeeper path must not end with / character | **Describe the bug**
the worker registry path ends with a / character, so ZooKeeper throws an exception
**To Reproduce**
**Expected behavior**
**Screenshots**
![image](https://user-images.githubusercontent.com/10829956/89892178-de8f3e00-dc08-11ea-9e60-20eaf7dd2273.png)
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/3466 | https://github.com/apache/dolphinscheduler/pull/3470 | d6a32ac65225fa4f8d9a80c09c5ea8a29e392dac | a7a1156ff1002031c87c29bd737d8f9f0488cea1 | "2020-08-11T11:49:59Z" | java | "2020-08-12T09:16:43Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/registry/WorkerRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.registry;
import java.util.Date;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.annotation.PostConstruct;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.state.ConnectionState;
import org.apache.curator.framework.state.ConnectionStateListener;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.remote.utils.NamedThreadFactory;
import org.apache.dolphinscheduler.server.registry.HeartBeatTask;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.common.collect.Sets;
import static org.apache.dolphinscheduler.common.Constants.*;
/**
* worker registry
*/
@Service
public class WorkerRegistry {
private final Logger logger = LoggerFactory.getLogger(WorkerRegistry.class);
/**
* zookeeper registry center
*/
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
/**
* worker config
*/
@Autowired
private WorkerConfig workerConfig;
/**
* heartbeat executor
*/
private ScheduledExecutorService heartBeatExecutor;
/**
* worker start time
*/
private String startTime;
private Set<String> workerGroups;
@PostConstruct
public void init() {
this.workerGroups = workerConfig.getWorkerGroups();
this.startTime = DateUtils.dateToString(new Date());
this.heartBeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("HeartBeatExecutor"));
}
/**
* registry
*/
public void registry() {
String address = NetUtils.getHost();
Set<String> workerZkPaths = getWorkerZkPaths();
int workerHeartbeatInterval = workerConfig.getWorkerHeartbeatInterval();
for (String workerZKPath : workerZkPaths) {
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(workerZKPath, "");
zookeeperRegistryCenter.getZookeeperCachedOperator().getZkClient().getConnectionStateListenable().addListener(new ConnectionStateListener() {
@Override
public void stateChanged(CuratorFramework client, ConnectionState newState) {
if (newState == ConnectionState.LOST) {
logger.error("worker : {} connection lost from zookeeper", address);
} else if (newState == ConnectionState.RECONNECTED) {
logger.info("worker : {} reconnected to zookeeper", address);
zookeeperRegistryCenter.getZookeeperCachedOperator().persistEphemeral(workerZKPath, "");
} else if (newState == ConnectionState.SUSPENDED) {
logger.warn("worker : {} connection SUSPENDED ", address);
}
}
});
logger.info("worker node : {} registry to ZK {} successfully", address, workerZKPath);
}
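// start a periodic heartbeat that refreshes load information under every registered znode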
HeartBeatTask heartBeatTask = new HeartBeatTask(this.startTime,
this.workerConfig.getWorkerReservedMemory(),
this.workerConfig.getWorkerMaxCpuloadAvg(),
workerZkPaths,
this.zookeeperRegistryCenter);
this.heartBeatExecutor.scheduleAtFixedRate(heartBeatTask, workerHeartbeatInterval, workerHeartbeatInterval, TimeUnit.SECONDS);
logger.info("worker node : {} heartbeat interval {} s", address, workerHeartbeatInterval);
}
/**
* remove registry info
*/
public void unRegistry() {
String address = getLocalAddress();
Set<String> workerZkPaths = getWorkerZkPaths();
for (String workerZkPath : workerZkPaths) {
zookeeperRegistryCenter.getZookeeperCachedOperator().remove(workerZkPath);
logger.info("worker node : {} unRegistry from ZK {}.", address, workerZkPath);
}
this.heartBeatExecutor.shutdownNow();
}
/**
* get worker path
*/
private Set<String> getWorkerZkPaths() {
Set<String> workerZkPaths = Sets.newHashSet();
String address = getLocalAddress();
String workerZkPathPrefix = this.zookeeperRegistryCenter.getWorkerPath();
String weight = getWorkerWeight();
for (String workGroup : this.workerGroups) {
StringBuilder workerZkPathBuilder = new StringBuilder(100);
workerZkPathBuilder.append(workerZkPathPrefix).append(SLASH);
if (StringUtils.isEmpty(workGroup)) {
workGroup = DEFAULT_WORKER_GROUP;
}
// trim and lower case is need
workerZkPathBuilder.append(workGroup.trim().toLowerCase()).append(SLASH);
workerZkPathBuilder.append(address);
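// NOTE: appending SLASH after the weight leaves the znode path with a trailing '/', which ZooKeeper rejects -- this is the bug reported in this issue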
workerZkPathBuilder.append(weight).append(SLASH);
workerZkPaths.add(workerZkPathBuilder.toString());
}
return workerZkPaths;
}
/**
* get local address
*/
private String getLocalAddress() {
return NetUtils.getHost() + ":" + workerConfig.getListenPort();
}
/**
* get Worker Weight
*/
private String getWorkerWeight() {
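// the weight is encoded as a ':<weight>' suffix on host:port, making it part of the registered znode name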
return ":" + workerConfig.getWeight();
}
}
|