Dataset column schema: status (stringclasses, 1 value); repo_name (stringclasses, 31 values); repo_url (stringclasses, 31 values); issue_id (int64, 1 to 104k); title (stringlengths 4 to 369); body (stringlengths 0 to 254k, nullable); issue_url (stringlengths 37 to 56); pull_url (stringlengths 37 to 54); before_fix_sha (stringlengths 40); after_fix_sha (stringlengths 40); report_datetime (unknown); language (stringclasses, 5 values); commit_datetime (unknown); updated_file (stringlengths 4 to 188); file_content (stringlengths 0 to 5.12M).
status | repo_name | repo_url | issue_id | title | body | issue_url | pull_url | before_fix_sha | after_fix_sha | report_datetime | language | commit_datetime | updated_file | file_content
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,138 | [Feature][Master] dispatch workgroup error add sleep time |
If a task's worker group cannot be found when the task is dispatched, a large number of log messages are produced.
![image](https://user-images.githubusercontent.com/39816903/100732974-7ad38f80-3408-11eb-88b7-89d32a242acc.png)
It is suggested to add a sleep operation: when there are tasks whose dispatch has failed and the number of tasks in the current task queue [taskPriorityQueue] is less than 10, sleep for 1 second.
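A minimal sketch of the suggested back-off, assuming a consumer loop shaped like TaskPriorityQueueConsumer's; the queue type, the dispatch hook, and the threshold constant below are illustrative stand-ins, not the change merged in the linked pull request:

```java
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;

// Illustrative dispatch loop: when a task cannot be dispatched (e.g. its
// worker group cannot be found), requeue it and sleep for one second
// whenever the queue holds fewer than 10 entries, so the loop does not
// spin and flood the log.
public class BackoffDispatchLoop implements Runnable {

    private final BlockingQueue<String> taskPriorityQueue = new PriorityBlockingQueue<>();

    @Override
    public void run() {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                String taskPriorityInfo = taskPriorityQueue.take();
                if (!dispatch(taskPriorityInfo)) {
                    // dispatch failed: requeue the task for a later retry
                    taskPriorityQueue.put(taskPriorityInfo);
                    if (taskPriorityQueue.size() < 10) {
                        TimeUnit.SECONDS.sleep(1); // the suggested sleep
                    }
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }

    // hypothetical hook; the real consumer delegates to ExecutorDispatcher
    private boolean dispatch(String taskPriorityInfo) {
        return false;
    }
}
```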
| https://github.com/apache/dolphinscheduler/issues/4138 | https://github.com/apache/dolphinscheduler/pull/4139 | 0039b1bfcbf822cb851898a1ceb4844fd6943731 | b3120a74d2656f7ad2054ba8245262551063b549 | "2020-12-01T11:08:26Z" | java | "2020-12-11T07:35:40Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.consumer;
import java.util.Date;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.dispatch.ExecutorDispatcher;
import org.apache.dolphinscheduler.server.master.dispatch.executor.NettyExecutorManager;
import org.apache.dolphinscheduler.server.registry.DependencyConfig;
import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class, CuratorZookeeperClient.class,
NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, TaskPriorityQueueConsumer.class,
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, MasterConfig.class,
CuratorZookeeperClient.class})
public class TaskPriorityQueueConsumerTest {
@Autowired
private TaskPriorityQueue taskPriorityQueue;
@Autowired
private TaskPriorityQueueConsumer taskPriorityQueueConsumer;
@Autowired
private ProcessService processService;
@Autowired
private ExecutorDispatcher dispatcher;
@Before
public void init(){
Tenant tenant = new Tenant();
tenant.setId(1);
tenant.setTenantCode("journey");
tenant.setDescription("journey");
tenant.setQueueId(1);
tenant.setCreateTime(new Date());
tenant.setUpdateTime(new Date());
Mockito.doReturn(tenant).when(processService).getTenantForProcess(1,2);
Mockito.doReturn("default").when(processService).queryUserQueueByProcessInstanceId(1);
}
@Test
public void testSHELLTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SHELL");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-55201\",\"maxRetryTimes\":0,\"name\":\"测试任务\",\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
taskPriorityQueue.put("2_1_2_1_default");
Thread.sleep(10000);
}
@Test
public void testSQLTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SQL");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-3655\",\"maxRetryTimes\":0,\"name\":\"UDF测试\",\"params\":\"{\\\"postStatements\\\":[],\\\"connParams\\\":\\\"\\\",\\\"receiversCc\\\":\\\"\\\",\\\"udfs\\\":\\\"1\\\",\\\"type\\\":\\\"HIVE\\\",\\\"title\\\":\\\"test\\\",\\\"sql\\\":\\\"select id,name,ds,zodia(ds) from t_journey_user\\\",\\\"preStatements\\\":[],\\\"sqlType\\\":0,\\\"receivers\\\":\\\"825193156@qq.com\\\",\\\"datasource\\\":3,\\\"showType\\\":\\\"TABLE\\\",\\\"localParams\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQL\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
taskPriorityQueue.put("2_1_2_1_default");
DataSource dataSource = new DataSource();
dataSource.setId(1);
dataSource.setName("sqlDatasource");
dataSource.setType(DbType.MYSQL);
dataSource.setUserId(2);
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}");
dataSource.setCreateTime(new Date());
dataSource.setUpdateTime(new Date());
Mockito.doReturn(dataSource).when(processService).findDataSourceById(1);
Thread.sleep(10000);
}
@Test
public void testDataxTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("DATAX");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-97625\",\"maxRetryTimes\":0,\"name\":\"MySQL数据相互导入\",\"params\":\"{\\\"targetTable\\\":\\\"pv2\\\",\\\"postStatements\\\":[],\\\"jobSpeedRecord\\\":1000,\\\"customConfig\\\":0,\\\"dtType\\\":\\\"MYSQL\\\",\\\"dsType\\\":\\\"MYSQL\\\",\\\"jobSpeedByte\\\":0,\\\"dataSource\\\":80,\\\"dataTarget\\\":80,\\\"sql\\\":\\\"SELECT dt,count FROM pv\\\",\\\"preStatements\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"DATAX\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
taskPriorityQueue.put("2_1_2_1_default");
DataSource dataSource = new DataSource();
dataSource.setId(80);
dataSource.setName("datax");
dataSource.setType(DbType.MYSQL);
dataSource.setUserId(2);
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}");
dataSource.setCreateTime(new Date());
dataSource.setUpdateTime(new Date());
Mockito.doReturn(dataSource).when(processService).findDataSourceById(80);
Thread.sleep(10000);
}
@Test
public void testSqoopTask() throws Exception {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SQOOP");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-63634\",\"maxRetryTimes\":0,\"name\":\"MySQL数据导入HDSF\",\"params\":\"{\\\"sourceType\\\":\\\"MYSQL\\\",\\\"targetType\\\":\\\"HDFS\\\",\\\"targetParams\\\":\\\"{\\\\\\\"targetPath\\\\\\\":\\\\\\\"/test/datatest\\\\\\\",\\\\\\\"deleteTargetDir\\\\\\\":true,\\\\\\\"fileType\\\\\\\":\\\\\\\"--as-textfile\\\\\\\",\\\\\\\"compressionCodec\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"fieldsTerminated\\\\\\\":\\\\\\\",\\\\\\\",\\\\\\\"linesTerminated\\\\\\\":\\\\\\\"\\\\\\\\\\\\\\\\n\\\\\\\"}\\\",\\\"modelType\\\":\\\"import\\\",\\\"sourceParams\\\":\\\"{\\\\\\\"srcType\\\\\\\":\\\\\\\"MYSQL\\\\\\\",\\\\\\\"srcDatasource\\\\\\\":1,\\\\\\\"srcTable\\\\\\\":\\\\\\\"t_ds_user\\\\\\\",\\\\\\\"srcQueryType\\\\\\\":\\\\\\\"0\\\\\\\",\\\\\\\"srcQuerySql\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"srcColumnType\\\\\\\":\\\\\\\"0\\\\\\\",\\\\\\\"srcColumns\\\\\\\":\\\\\\\"\\\\\\\",\\\\\\\"srcConditionList\\\\\\\":[],\\\\\\\"mapColumnHive\\\\\\\":[],\\\\\\\"mapColumnJava\\\\\\\":[]}\\\",\\\"localParams\\\":[],\\\"concurrency\\\":1}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SQOOP\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setTenantId(1);
processInstance.setCommandType(CommandType.START_PROCESS);
taskInstance.setProcessInstance(processInstance);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setUserId(2);
processDefinition.setProjectId(1);
taskInstance.setProcessDefine(processDefinition);
Mockito.doReturn(taskInstance).when(processService).getTaskInstanceDetailByTaskId(1);
taskPriorityQueue.put("2_1_2_1_default");
DataSource dataSource = new DataSource();
dataSource.setId(1);
dataSource.setName("datax");
dataSource.setType(DbType.MYSQL);
dataSource.setUserId(2);
dataSource.setConnectionParams("{\"address\":\"jdbc:mysql://192.168.221.185:3306\",\"database\":\"dolphinscheduler_qiaozhanwei\",\"jdbcUrl\":\"jdbc:mysql://192.168.221.185:3306/dolphinscheduler_qiaozhanwei\",\"user\":\"root\",\"password\":\"root@123\"}");
dataSource.setCreateTime(new Date());
dataSource.setUpdateTime(new Date());
Mockito.doReturn(dataSource).when(processService).findDataSourceById(1);
Thread.sleep(10000);
}
@Test
public void testTaskInstanceIsFinalState(){
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1);
taskInstance.setTaskType("SHELL");
taskInstance.setProcessDefinitionId(1);
taskInstance.setProcessInstanceId(1);
taskInstance.setState(ExecutionStatus.KILL);
taskInstance.setTaskJson("{\"conditionResult\":\"{\\\"successNode\\\":[\\\"\\\"],\\\"failedNode\\\":[\\\"\\\"]}\",\"conditionsTask\":false,\"depList\":[],\"dependence\":\"{}\",\"forbidden\":false,\"id\":\"tasks-55201\",\"maxRetryTimes\":0,\"name\":\"测试任务\",\"params\":\"{\\\"rawScript\\\":\\\"echo \\\\\\\"测试任务\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}\",\"preTasks\":\"[]\",\"retryInterval\":1,\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\",\"taskTimeoutParameter\":{\"enable\":false,\"interval\":0},\"timeout\":\"{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}\",\"type\":\"SHELL\",\"workerGroup\":\"default\"}");
taskInstance.setProcessInstancePriority(Priority.MEDIUM);
taskInstance.setWorkerGroup("default");
taskInstance.setExecutorId(2);
Mockito.doReturn(taskInstance).when(processService).findTaskInstanceById(1);
taskPriorityQueueConsumer.taskInstanceIsFinalState(1);
}
@After
public void close() {
Stopper.stop();
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throw NullPointException | 1. When a worker is running more than 100 tasks, some tasks wait in its queue. If the master then sends a kill command to this worker, TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext). But the task instance is not running on this worker yet; it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside that method, and TaskKillProcessor continues on to build the response command. In the method "buildKillTaskResponseCommand" it reads the TaskExecutionContext from the cache again, but the cached entry is null, so the command is sent back to the master with a null taskInstanceId.
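For context, a hedged repro of the failure mode described above; the map stands in for the ConcurrentHashMap inside TaskExecutionContextCacheManagerImpl (shown further down), and the scenario is the one the reporter describes:

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Hypothetical repro: the kill path looks up an execution context that was
// never cached, because the task was accepted but is still waiting in the
// worker's queue.
public class QueuedTaskKillRepro {

    // stands in for the cache inside TaskExecutionContextCacheManagerImpl
    private static final Map<Integer, TaskExecutionContext> CACHE = new ConcurrentHashMap<>();

    public static void main(String[] args) {
        int taskInstanceId = 42;                              // queued, never started
        TaskExecutionContext ctx = CACHE.get(taskInstanceId); // returns null
        int processId = ctx.getProcessId();                   // NullPointerException, as reported
    }
}
```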
| https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.entity;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import java.io.Serializable;
import java.util.Date;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
* master/worker task transport
*/
public class TaskExecutionContext implements Serializable {
/**
* task id
*/
private int taskInstanceId;
/**
* task name
*/
private String taskName;
/**
* task first submit time.
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date firstSubmitTime;
/**
* task start time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date startTime;
/**
* task type
*/
private String taskType;
/**
* host
*/
private String host;
/**
* task execute path
*/
private String executePath;
/**
* log path
*/
private String logPath;
/**
* task json
*/
private String taskJson;
/**
* processId
*/
private int processId;
/**
* appIds
*/
private String appIds;
/**
* process instance id
*/
private int processInstanceId;
/**
* process instance schedule time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date scheduleTime;
/**
* process instance global parameters
*/
private String globalParams;
/**
* execute user id
*/
private int executorId;
/**
* command type if complement
*/
private int cmdTypeIfComplement;
/**
* tenant code
*/
private String tenantCode;
/**
* task queue
*/
private String queue;
/**
* process define id
*/
private int processDefineId;
/**
* project id
*/
private int projectId;
/**
* taskParams
*/
private String taskParams;
/**
* envFile
*/
private String envFile;
/**
* definedParams
*/
private Map<String, String> definedParams;
/**
* task AppId
*/
private String taskAppId;
/**
* task timeout strategy
*/
private int taskTimeoutStrategy;
/**
* task timeout
*/
private int taskTimeout;
/**
* worker group
*/
private String workerGroup;
/**
* delay execution time.
*/
private int delayTime;
/**
* current execution status
*/
private ExecutionStatus currentExecutionStatus;
/**
* resources full name and tenant code
*/
private Map<String, String> resources;
/**
* sql TaskExecutionContext
*/
private SQLTaskExecutionContext sqlTaskExecutionContext;
/**
* datax TaskExecutionContext
*/
private DataxTaskExecutionContext dataxTaskExecutionContext;
/**
* dependence TaskExecutionContext
*/
private DependenceTaskExecutionContext dependenceTaskExecutionContext;
/**
* sqoop TaskExecutionContext
*/
private SqoopTaskExecutionContext sqoopTaskExecutionContext;
/**
* procedure TaskExecutionContext
*/
private ProcedureTaskExecutionContext procedureTaskExecutionContext;
public int getTaskInstanceId() {
return taskInstanceId;
}
public void setTaskInstanceId(int taskInstanceId) {
this.taskInstanceId = taskInstanceId;
}
public String getTaskName() {
return taskName;
}
public void setTaskName(String taskName) {
this.taskName = taskName;
}
public Date getFirstSubmitTime() {
return firstSubmitTime;
}
public void setFirstSubmitTime(Date firstSubmitTime) {
this.firstSubmitTime = firstSubmitTime;
}
public Date getStartTime() {
return startTime;
}
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
public String getTaskType() {
return taskType;
}
public void setTaskType(String taskType) {
this.taskType = taskType;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public String getExecutePath() {
return executePath;
}
public void setExecutePath(String executePath) {
this.executePath = executePath;
}
public String getLogPath() {
return logPath;
}
public void setLogPath(String logPath) {
this.logPath = logPath;
}
public String getTaskJson() {
return taskJson;
}
public void setTaskJson(String taskJson) {
this.taskJson = taskJson;
}
public int getProcessId() {
return processId;
}
public void setProcessId(int processId) {
this.processId = processId;
}
public String getAppIds() {
return appIds;
}
public void setAppIds(String appIds) {
this.appIds = appIds;
}
public int getProcessInstanceId() {
return processInstanceId;
}
public void setProcessInstanceId(int processInstanceId) {
this.processInstanceId = processInstanceId;
}
public Date getScheduleTime() {
return scheduleTime;
}
public void setScheduleTime(Date scheduleTime) {
this.scheduleTime = scheduleTime;
}
public String getGlobalParams() {
return globalParams;
}
public void setGlobalParams(String globalParams) {
this.globalParams = globalParams;
}
public int getExecutorId() {
return executorId;
}
public void setExecutorId(int executorId) {
this.executorId = executorId;
}
public int getCmdTypeIfComplement() {
return cmdTypeIfComplement;
}
public void setCmdTypeIfComplement(int cmdTypeIfComplement) {
this.cmdTypeIfComplement = cmdTypeIfComplement;
}
public String getTenantCode() {
return tenantCode;
}
public void setTenantCode(String tenantCode) {
this.tenantCode = tenantCode;
}
public String getQueue() {
return queue;
}
public void setQueue(String queue) {
this.queue = queue;
}
public int getProcessDefineId() {
return processDefineId;
}
public void setProcessDefineId(int processDefineId) {
this.processDefineId = processDefineId;
}
public int getProjectId() {
return projectId;
}
public void setProjectId(int projectId) {
this.projectId = projectId;
}
public String getTaskParams() {
return taskParams;
}
public void setTaskParams(String taskParams) {
this.taskParams = taskParams;
}
public String getEnvFile() {
return envFile;
}
public void setEnvFile(String envFile) {
this.envFile = envFile;
}
public Map<String, String> getDefinedParams() {
return definedParams;
}
public void setDefinedParams(Map<String, String> definedParams) {
this.definedParams = definedParams;
}
public String getTaskAppId() {
return taskAppId;
}
public void setTaskAppId(String taskAppId) {
this.taskAppId = taskAppId;
}
public int getTaskTimeoutStrategy() {
return taskTimeoutStrategy;
}
public void setTaskTimeoutStrategy(int taskTimeoutStrategy) {
this.taskTimeoutStrategy = taskTimeoutStrategy;
}
public int getTaskTimeout() {
return taskTimeout;
}
public void setTaskTimeout(int taskTimeout) {
this.taskTimeout = taskTimeout;
}
public String getWorkerGroup() {
return workerGroup;
}
public void setWorkerGroup(String workerGroup) {
this.workerGroup = workerGroup;
}
public int getDelayTime() {
return delayTime;
}
public void setDelayTime(int delayTime) {
this.delayTime = delayTime;
}
public ExecutionStatus getCurrentExecutionStatus() {
return currentExecutionStatus;
}
public void setCurrentExecutionStatus(ExecutionStatus currentExecutionStatus) {
this.currentExecutionStatus = currentExecutionStatus;
}
public SQLTaskExecutionContext getSqlTaskExecutionContext() {
return sqlTaskExecutionContext;
}
public void setSqlTaskExecutionContext(SQLTaskExecutionContext sqlTaskExecutionContext) {
this.sqlTaskExecutionContext = sqlTaskExecutionContext;
}
public DataxTaskExecutionContext getDataxTaskExecutionContext() {
return dataxTaskExecutionContext;
}
public void setDataxTaskExecutionContext(DataxTaskExecutionContext dataxTaskExecutionContext) {
this.dataxTaskExecutionContext = dataxTaskExecutionContext;
}
public ProcedureTaskExecutionContext getProcedureTaskExecutionContext() {
return procedureTaskExecutionContext;
}
public void setProcedureTaskExecutionContext(ProcedureTaskExecutionContext procedureTaskExecutionContext) {
this.procedureTaskExecutionContext = procedureTaskExecutionContext;
}
public Command toCommand() {
TaskExecuteRequestCommand requestCommand = new TaskExecuteRequestCommand();
requestCommand.setTaskExecutionContext(JSONUtils.toJsonString(this));
return requestCommand.convert2Command();
}
public DependenceTaskExecutionContext getDependenceTaskExecutionContext() {
return dependenceTaskExecutionContext;
}
public void setDependenceTaskExecutionContext(DependenceTaskExecutionContext dependenceTaskExecutionContext) {
this.dependenceTaskExecutionContext = dependenceTaskExecutionContext;
}
public Map<String, String> getResources() {
return resources;
}
public void setResources(Map<String, String> resources) {
this.resources = resources;
}
public SqoopTaskExecutionContext getSqoopTaskExecutionContext() {
return sqoopTaskExecutionContext;
}
public void setSqoopTaskExecutionContext(SqoopTaskExecutionContext sqoopTaskExecutionContext) {
this.sqoopTaskExecutionContext = sqoopTaskExecutionContext;
}
@Override
public String toString() {
return "TaskExecutionContext{"
+ "taskInstanceId=" + taskInstanceId
+ ", taskName='" + taskName + '\''
+ ", currentExecutionStatus=" + currentExecutionStatus
+ ", firstSubmitTime=" + firstSubmitTime
+ ", startTime=" + startTime
+ ", taskType='" + taskType + '\''
+ ", host='" + host + '\''
+ ", executePath='" + executePath + '\''
+ ", logPath='" + logPath + '\''
+ ", taskJson='" + taskJson + '\''
+ ", processId=" + processId
+ ", appIds='" + appIds + '\''
+ ", processInstanceId=" + processInstanceId
+ ", scheduleTime=" + scheduleTime
+ ", globalParams='" + globalParams + '\''
+ ", executorId=" + executorId
+ ", cmdTypeIfComplement=" + cmdTypeIfComplement
+ ", tenantCode='" + tenantCode + '\''
+ ", queue='" + queue + '\''
+ ", processDefineId=" + processDefineId
+ ", projectId=" + projectId
+ ", taskParams='" + taskParams + '\''
+ ", envFile='" + envFile + '\''
+ ", definedParams=" + definedParams
+ ", taskAppId='" + taskAppId + '\''
+ ", taskTimeoutStrategy=" + taskTimeoutStrategy
+ ", taskTimeout=" + taskTimeout
+ ", workerGroup='" + workerGroup + '\''
+ ", delayTime=" + delayTime
+ ", resources=" + resources
+ ", sqlTaskExecutionContext=" + sqlTaskExecutionContext
+ ", dataxTaskExecutionContext=" + dataxTaskExecutionContext
+ ", dependenceTaskExecutionContext=" + dependenceTaskExecutionContext
+ ", sqoopTaskExecutionContext=" + sqoopTaskExecutionContext
+ ", procedureTaskExecutionContext=" + procedureTaskExecutionContext
+ '}';
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throw NullPointException | 1. When a worker is running more than 100 tasks, some tasks wait in its queue. If the master then sends a kill command to this worker, TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext). But the task instance is not running on this worker yet; it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside that method, and TaskKillProcessor continues on to build the response command. In the method "buildKillTaskResponseCommand" it reads the TaskExecutionContext from the cache again, but the cached entry is null, so the command is sent back to the master with a null taskInstanceId. | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.cache;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
/**
* TaskExecutionContextCacheManager
*/
public interface TaskExecutionContextCacheManager {
/**
* get taskInstance by taskInstance id
*
* @param taskInstanceId taskInstanceId
* @return taskInstance
*/
TaskExecutionContext getByTaskInstanceId(Integer taskInstanceId);
/**
* cache taskInstance
*
* @param taskExecutionContext taskExecutionContext
*/
void cacheTaskExecutionContext(TaskExecutionContext taskExecutionContext);
/**
* remove taskInstance by taskInstanceId
* @param taskInstanceId taskInstanceId
*/
void removeByTaskInstanceId(Integer taskInstanceId);
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throw NullPointException | 1. When a worker is running more than 100 tasks, some tasks wait in its queue. If the master then sends a kill command to this worker, TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext). But the task instance is not running on this worker yet; it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside that method, and TaskKillProcessor continues on to build the response command. In the method "buildKillTaskResponseCommand" it reads the TaskExecutionContext from the cache again, but the cached entry is null, so the command is sent back to the master with a null taskInstanceId. | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/cache/impl/TaskExecutionContextCacheManagerImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.cache.impl;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;
import org.springframework.stereotype.Service;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* TaskExecutionContextCache
*/
@Service
public class TaskExecutionContextCacheManagerImpl implements TaskExecutionContextCacheManager {
/**
* taskInstance cache
*/
private Map<Integer,TaskExecutionContext> taskExecutionContextCache = new ConcurrentHashMap<>();
/**
* get taskInstance by taskInstance id
*
* @param taskInstanceId taskInstanceId
* @return taskInstance
*/
@Override
public TaskExecutionContext getByTaskInstanceId(Integer taskInstanceId) {
return taskExecutionContextCache.get(taskInstanceId);
}
/**
* cache taskInstance
*
* @param taskExecutionContext taskExecutionContext
*/
@Override
public void cacheTaskExecutionContext(TaskExecutionContext taskExecutionContext) {
taskExecutionContextCache.put(taskExecutionContext.getTaskInstanceId(),taskExecutionContext);
}
/**
* remove taskInstance by taskInstanceId
* @param taskInstanceId taskInstanceId
*/
@Override
public void removeByTaskInstanceId(Integer taskInstanceId) {
taskExecutionContextCache.remove(taskInstanceId);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throw NullPointException | 1. When a worker is running more than 100 tasks, some tasks wait in its queue. If the master then sends a kill command to this worker, TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext). But the task instance is not running on this worker yet; it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside that method, and TaskKillProcessor continues on to build the response command. In the method "buildKillTaskResponseCommand" it reads the TaskExecutionContext from the cache again, but the cached entry is null, so the command is sent back to the master with a null taskInstanceId. | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import org.apache.dolphinscheduler.common.enums.Event;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.common.utils.RetryerUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.LogUtils;
import org.apache.dolphinscheduler.server.worker.cache.ResponceCache;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import java.util.Date;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.Channel;
/**
* worker request processor
*/
public class TaskExecuteProcessor implements NettyRequestProcessor {
private final Logger logger = LoggerFactory.getLogger(TaskExecuteProcessor.class);
/**
* thread executor service
*/
private final ExecutorService workerExecService;
/**
* worker config
*/
private final WorkerConfig workerConfig;
/**
* task callback service
*/
private final TaskCallbackService taskCallbackService;
public TaskExecuteProcessor() {
this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class);
this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class);
this.workerExecService = ThreadUtils.newDaemonFixedThreadExecutor("Worker-Execute-Thread", workerConfig.getWorkerExecThreads());
}
@Override
public void process(Channel channel, Command command) {
Preconditions.checkArgument(CommandType.TASK_EXECUTE_REQUEST == command.getType(),
String.format("invalid command type : %s", command.getType()));
TaskExecuteRequestCommand taskRequestCommand = JSONUtils.parseObject(
command.getBody(), TaskExecuteRequestCommand.class);
logger.info("received command : {}", taskRequestCommand);
if (taskRequestCommand == null) {
logger.error("task execute request command is null");
return;
}
String contextJson = taskRequestCommand.getTaskExecutionContext();
TaskExecutionContext taskExecutionContext = JSONUtils.parseObject(contextJson, TaskExecutionContext.class);
if (taskExecutionContext == null) {
logger.error("task execution context is null");
return;
}
// custom logger
Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX,
taskExecutionContext.getProcessDefineId(),
taskExecutionContext.getProcessInstanceId(),
taskExecutionContext.getTaskInstanceId()));
taskExecutionContext.setHost(NetUtils.getHost() + ":" + workerConfig.getListenPort());
taskExecutionContext.setStartTime(new Date());
taskExecutionContext.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext));
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
// local execute path
String execLocalPath = getExecLocalPath(taskExecutionContext);
logger.info("task instance local execute path : {} ", execLocalPath);
FileUtils.taskLoggerThreadLocal.set(taskLogger);
try {
FileUtils.createWorkDirAndUserIfAbsent(execLocalPath, taskExecutionContext.getTenantCode());
} catch (Throwable ex) {
String errorLog = String.format("create execLocalPath : %s", execLocalPath);
LoggerUtils.logError(Optional.ofNullable(logger), errorLog, ex);
LoggerUtils.logError(Optional.ofNullable(taskLogger), errorLog, ex);
}
FileUtils.taskLoggerThreadLocal.remove();
taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(),
new NettyRemoteChannel(channel, command.getOpaque()));
this.doAck(taskExecutionContext);
// submit task
workerExecService.submit(new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger));
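// note: the TaskExecutionContext is not cached at this point; it is only
// cached in AbstractCommandExecutor.run() once the task process starts,
// which is why a kill that arrives for a still-queued task finds no cache
// entry (see issue #4172 above)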
}
private void doAck(TaskExecutionContext taskExecutionContext){
// tell master that task is in executing
TaskExecuteAckCommand ackCommand = buildAckCommand(taskExecutionContext);
ResponceCache.get().cache(taskExecutionContext.getTaskInstanceId(),ackCommand.convert2Command(),Event.ACK);
taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand.convert2Command());
}
/**
* build ack command
* @param taskExecutionContext taskExecutionContext
* @return TaskExecuteAckCommand
*/
private TaskExecuteAckCommand buildAckCommand(TaskExecutionContext taskExecutionContext) {
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId());
ackCommand.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode());
ackCommand.setLogPath(LogUtils.getTaskLogPath(taskExecutionContext));
ackCommand.setHost(taskExecutionContext.getHost());
ackCommand.setStartTime(taskExecutionContext.getStartTime());
if (taskExecutionContext.getTaskType().equals(TaskType.SQL.name()) || taskExecutionContext.getTaskType().equals(TaskType.PROCEDURE.name())) {
ackCommand.setExecutePath(null);
} else {
ackCommand.setExecutePath(taskExecutionContext.getExecutePath());
}
taskExecutionContext.setLogPath(ackCommand.getLogPath());
return ackCommand;
}
/**
* get execute local path
* @param taskExecutionContext taskExecutionContext
* @return execute local path
*/
private String getExecLocalPath(TaskExecutionContext taskExecutionContext) {
return FileUtils.getProcessExecDir(taskExecutionContext.getProjectId(),
taskExecutionContext.getProcessDefineId(),
taskExecutionContext.getProcessInstanceId(),
taskExecutionContext.getTaskInstanceId());
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throw NullPointException | 1. When a worker is running more than 100 tasks, some tasks wait in its queue. If the master then sends a kill command to this worker, TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext). But the task instance is not running on this worker yet; it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside that method, and TaskKillProcessor continues on to build the response command. In the method "buildKillTaskResponseCommand" it reads the TaskExecutionContext from the cache again, but the cached entry is null, so the command is sent back to the master with a null taskInstanceId.
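A minimal sketch of a guard for this case, mirroring the shape of doKill in the file below; it belongs inside a TaskKillProcessor-like class, and the fix actually merged in the linked pull request may differ:

```java
// Sketch: treat a missing cache entry as "nothing to kill" instead of
// dereferencing it. Names mirror TaskKillProcessor.doKill below.
private Pair<Boolean, List<String>> doKill(TaskKillRequestCommand killCommand) {
    List<String> appIds = Collections.emptyList();
    TaskExecutionContext taskExecutionContext =
            taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId());
    if (taskExecutionContext == null) {
        // the task never started on this worker, so there is no process to kill
        logger.error("task execution context not found, task instance id:{}",
                killCommand.getTaskInstanceId());
        return Pair.of(false, appIds);
    }
    // ... the existing kill logic can now safely dereference taskExecutionContext ...
    return Pair.of(true, appIds);
}
```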
| https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskKillRequestCommand;
import org.apache.dolphinscheduler.remote.command.TaskKillResponseCommand;
import org.apache.dolphinscheduler.remote.processor.NettyRequestProcessor;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.remote.utils.Pair;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.log.LogClientService;
import java.util.Collections;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.Channel;
/**
* task kill processor
*/
public class TaskKillProcessor implements NettyRequestProcessor {
private final Logger logger = LoggerFactory.getLogger(TaskKillProcessor.class);
/**
* worker config
*/
private final WorkerConfig workerConfig;
/**
* task callback service
*/
private final TaskCallbackService taskCallbackService;
/**
* taskExecutionContextCacheManager
*/
private TaskExecutionContextCacheManager taskExecutionContextCacheManager;
public TaskKillProcessor() {
this.taskCallbackService = SpringApplicationContext.getBean(TaskCallbackService.class);
this.workerConfig = SpringApplicationContext.getBean(WorkerConfig.class);
this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class);
}
/**
* task kill process
*
* @param channel channel channel
* @param command command command
*/
@Override
public void process(Channel channel, Command command) {
Preconditions.checkArgument(CommandType.TASK_KILL_REQUEST == command.getType(), String.format("invalid command type : %s", command.getType()));
TaskKillRequestCommand killCommand = JSONUtils.parseObject(command.getBody(), TaskKillRequestCommand.class);
logger.info("received kill command : {}", killCommand);
Pair<Boolean, List<String>> result = doKill(killCommand);
taskCallbackService.addRemoteChannel(killCommand.getTaskInstanceId(),
new NettyRemoteChannel(channel, command.getOpaque()));
TaskKillResponseCommand taskKillResponseCommand = buildKillTaskResponseCommand(killCommand,result);
taskCallbackService.sendResult(taskKillResponseCommand.getTaskInstanceId(), taskKillResponseCommand.convert2Command());
taskExecutionContextCacheManager.removeByTaskInstanceId(taskKillResponseCommand.getTaskInstanceId());
}
/**
* do kill
* @param killCommand
* @return kill result
*/
private Pair<Boolean, List<String>> doKill(TaskKillRequestCommand killCommand) {
List<String> appIds = Collections.EMPTY_LIST;
try {
TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId());
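// note (issue #4172): this lookup returns null while the task is still
// queued, because the context is only cached once the process starts
// (see AbstractCommandExecutor.run()), so the getProcessId() call below
// throws a NullPointerException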
Integer processId = taskExecutionContext.getProcessId();
if (processId == null || processId.equals(0)) {
logger.error("process kill failed, process id :{}, task id:{}", processId, killCommand.getTaskInstanceId());
return Pair.of(false, appIds);
}
String cmd = String.format("sudo kill -9 %s", ProcessUtils.getPidsStr(taskExecutionContext.getProcessId()));
logger.info("process id:{}, cmd:{}", taskExecutionContext.getProcessId(), cmd);
OSUtils.exeCmd(cmd);
// find log and kill yarn job
appIds = killYarnJob(Host.of(taskExecutionContext.getHost()).getIp(),
taskExecutionContext.getLogPath(),
taskExecutionContext.getExecutePath(),
taskExecutionContext.getTenantCode());
return Pair.of(true, appIds);
} catch (Exception e) {
logger.error("kill task error", e);
}
return Pair.of(false, appIds);
}
/**
* build TaskKillResponseCommand
*
* @param killCommand kill command
* @param result exe result
* @return build TaskKillResponseCommand
*/
private TaskKillResponseCommand buildKillTaskResponseCommand(TaskKillRequestCommand killCommand,
Pair<Boolean, List<String>> result) {
TaskKillResponseCommand taskKillResponseCommand = new TaskKillResponseCommand();
taskKillResponseCommand.setStatus(result.getLeft() ? ExecutionStatus.SUCCESS.getCode() : ExecutionStatus.FAILURE.getCode());
taskKillResponseCommand.setAppIds(result.getRight());
TaskExecutionContext taskExecutionContext = taskExecutionContextCacheManager.getByTaskInstanceId(killCommand.getTaskInstanceId());
if (taskExecutionContext != null) {
taskKillResponseCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId());
taskKillResponseCommand.setHost(taskExecutionContext.getHost());
taskKillResponseCommand.setProcessId(taskExecutionContext.getProcessId());
}
return taskKillResponseCommand;
}
/**
* kill yarn job
*
* @param host host
* @param logPath logPath
* @param executePath executePath
* @param tenantCode tenantCode
* @return List<String> appIds
*/
private List<String> killYarnJob(String host, String logPath, String executePath, String tenantCode) {
LogClientService logClient = null;
try {
logClient = new LogClientService();
logger.info("view log host : {},logPath : {}", host,logPath);
String log = logClient.viewLog(host, Constants.RPC_PORT, logPath);
if (StringUtils.isNotEmpty(log)) {
List<String> appIds = LoggerUtils.getAppIds(log, logger);
if (StringUtils.isEmpty(executePath)) {
logger.error("task instance execute path is empty");
throw new RuntimeException("task instance execute path is empty");
}
if (appIds.size() > 0) {
ProcessUtils.cancelApplication(appIds, logger, tenantCode, executePath);
return appIds;
}
}
} catch (Exception e) {
logger.error("kill yarn job error",e);
} finally {
if (logClient != null) {
logClient.close();
}
}
return Collections.EMPTY_LIST;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throw NullPointException | 1. When a worker is running more than 100 tasks, some tasks wait in its queue. If the master then sends a kill command to this worker, TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext). But the task instance is not running on this worker yet; it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside that method, and TaskKillProcessor continues on to build the response command. In the method "buildKillTaskResponseCommand" it reads the TaskExecutionContext from the cache again, but the cached entry is null, so the command is sent back to the master with a null taskInstanceId. | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import java.io.*;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS;
/**
* abstract command executor
*/
public abstract class AbstractCommandExecutor {
/**
* rules for extracting application ID
*/
protected static final Pattern APPLICATION_REGEX = Pattern.compile(Constants.APPLICATION_REGEX);
protected StringBuilder varPool = new StringBuilder();
/**
* process
*/
private Process process;
/**
* log handler
*/
protected Consumer<List<String>> logHandler;
/**
* logger
*/
protected Logger logger;
/**
* log list
*/
protected final List<String> logBuffer;
/**
* taskExecutionContext
*/
protected TaskExecutionContext taskExecutionContext;
/**
* taskExecutionContextCacheManager
*/
private TaskExecutionContextCacheManager taskExecutionContextCacheManager;
public AbstractCommandExecutor(Consumer<List<String>> logHandler,
TaskExecutionContext taskExecutionContext ,
Logger logger){
this.logHandler = logHandler;
this.taskExecutionContext = taskExecutionContext;
this.logger = logger;
this.logBuffer = Collections.synchronizedList(new ArrayList<>());
this.taskExecutionContextCacheManager = SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class);
}
/**
* build process
*
* @param commandFile command file
* @throws IOException IO Exception
*/
private void buildProcess(String commandFile) throws IOException {
// setting up user to run commands
List<String> command = new LinkedList<>();
//init process builder
ProcessBuilder processBuilder = new ProcessBuilder();
// setting up a working directory
processBuilder.directory(new File(taskExecutionContext.getExecutePath()));
// merge error information to standard output stream
processBuilder.redirectErrorStream(true);
// setting up user to run commands
command.add("sudo");
command.add("-u");
command.add(taskExecutionContext.getTenantCode());
command.add(commandInterpreter());
command.addAll(commandOptions());
command.add(commandFile);
// setting commands
processBuilder.command(command);
process = processBuilder.start();
// print command
printCommand(command);
}
/**
* task specific execution logic
*
* @param execCommand execCommand
* @return CommandExecuteResult
* @throws Exception if error throws Exception
*/
public CommandExecuteResult run(String execCommand) throws Exception{
CommandExecuteResult result = new CommandExecuteResult();
if (StringUtils.isEmpty(execCommand)) {
return result;
}
String commandFilePath = buildCommandFilePath();
// create command file if not exists
createCommandFileIfNotExists(execCommand, commandFilePath);
//build process
buildProcess(commandFilePath);
// parse process output
parseProcessOutput(process);
Integer processId = getProcessId(process);
result.setProcessId(processId);
// cache processId
taskExecutionContext.setProcessId(processId);
taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext);
// print process id
logger.info("process start, process id is: {}", processId);
// if timeout occurs, exit directly
long remainTime = getRemaintime();
// waiting for the run to finish
boolean status = process.waitFor(remainTime, TimeUnit.SECONDS);
logger.info("process has exited, execute path:{}, processId:{} ,exitStatusCode:{}",
taskExecutionContext.getExecutePath(),
processId
, result.getExitStatusCode());
// if SHELL task exit
if (status) {
// set appIds
List<String> appIds = getAppIds(taskExecutionContext.getLogPath());
result.setAppIds(String.join(Constants.COMMA, appIds));
// SHELL task state
result.setExitStatusCode(process.exitValue());
// if yarn task , yarn state is final state
if (process.exitValue() == 0){
result.setExitStatusCode(isSuccessOfYarnState(appIds) ? EXIT_CODE_SUCCESS : EXIT_CODE_FAILURE);
}
} else {
logger.error("process has failure , exitStatusCode : {} , ready to kill ...", result.getExitStatusCode());
ProcessUtils.kill(taskExecutionContext);
result.setExitStatusCode(EXIT_CODE_FAILURE);
}
return result;
}
public String getVarPool() {
return varPool.toString();
}
/**
* cancel application
* @throws Exception exception
*/
public void cancelApplication() throws Exception {
if (process == null) {
return;
}
// clear log
clear();
int processId = getProcessId(process);
logger.info("cancel process: {}", processId);
// try a soft kill first; softKill reports whether the process is still alive
boolean stillAlive = softKill(processId);
if (stillAlive) {
// hard kill
hardKill(processId);
// destroy
process.destroy();
process = null;
}
}
/**
* soft kill
* @param processId process id
* @return true if the process is still alive after the kill attempt
*/
private boolean softKill(int processId) {
if (processId != 0 && process.isAlive()) {
try {
// sudo -u user command to run command
String cmd = String.format("sudo kill %d", processId);
logger.info("soft kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd);
Runtime.getRuntime().exec(cmd);
} catch (IOException e) {
logger.info("kill attempt failed", e);
}
}
return process.isAlive();
}
/**
* hard kill
* @param processId process id
*/
private void hardKill(int processId) {
if (processId != 0 && process.isAlive()) {
try {
String cmd = String.format("sudo kill -9 %d", processId);
logger.info("hard kill task:{}, process id:{}, cmd:{}", taskExecutionContext.getTaskAppId(), processId, cmd);
Runtime.getRuntime().exec(cmd);
} catch (IOException e) {
logger.error("kill attempt failed ", e);
}
}
}
/**
* print command
* @param commands process builder
*/
private void printCommand(List<String> commands) {
String cmdStr;
try {
cmdStr = ProcessUtils.buildCommandStr(commands);
logger.info("task run command:\n{}", cmdStr);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* clear
*/
private void clear() {
List<String> markerList = new ArrayList<>();
markerList.add(ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER.toString());
if (!logBuffer.isEmpty()) {
// log handle
logHandler.accept(logBuffer);
logBuffer.clear();
}
logHandler.accept(markerList);
}
/**
* get the standard output of the process
* @param process process
*/
private void parseProcessOutput(Process process) {
String threadLoggerInfoName = String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", taskExecutionContext.getTaskAppId());
ExecutorService parseProcessOutputExecutorService = ThreadUtils.newDaemonSingleThreadExecutor(threadLoggerInfoName);
parseProcessOutputExecutorService.submit(new Runnable(){
@Override
public void run() {
BufferedReader inReader = null;
try {
inReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
String line;
long lastFlushTime = System.currentTimeMillis();
while ((line = inReader.readLine()) != null) {
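// lines like ${setValue(key=value)} are captured into the worker's variable pool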
if (line.startsWith("${setValue(")) {
varPool.append(line.substring("${setValue(".length(), line.length() - 2));
varPool.append("$VarPool$");
} else {
logBuffer.add(line);
lastFlushTime = flush(lastFlushTime);
}
}
} catch (Exception e) {
logger.error(e.getMessage(),e);
} finally {
clear();
close(inReader);
}
}
});
parseProcessOutputExecutorService.shutdown();
}
/**
* check yarn state
*
* @param appIds application id list
* @return is success of yarn task state
*/
public boolean isSuccessOfYarnState(List<String> appIds) {
boolean result = true;
try {
for (String appId : appIds) {
while(Stopper.isRunning()){
ExecutionStatus applicationStatus = HadoopUtils.getInstance().getApplicationStatus(appId);
logger.info("appId:{}, final state:{}",appId,applicationStatus.name());
if (applicationStatus.equals(ExecutionStatus.FAILURE) ||
applicationStatus.equals(ExecutionStatus.KILL)) {
return false;
}
if (applicationStatus.equals(ExecutionStatus.SUCCESS)){
break;
}
Thread.sleep(Constants.SLEEP_TIME_MILLIS);
}
}
} catch (Exception e) {
logger.error(String.format("yarn applications: %s status failed ", appIds.toString()),e);
result = false;
}
return result;
}
public int getProcessId() {
return getProcessId(process);
}
/**
* get app links
*
* @param logPath log path
* @return app id list
*/
private List<String> getAppIds(String logPath) {
List<String> logs = convertFile2List(logPath);
List<String> appIds = new ArrayList<>();
/**
* analyse the log to get the submitted yarn application ids
*/
for (String log : logs) {
String appId = findAppId(log);
if (StringUtils.isNotEmpty(appId) && !appIds.contains(appId)) {
logger.info("find app id: {}", appId);
appIds.add(appId);
}
}
return appIds;
}
/**
* convert file to list
* @param filename file name
* @return line list
*/
private List<String> convertFile2List(String filename) {
List<String> lineList = new ArrayList<>(100);
File file = new File(filename);
if (!file.exists()){
return lineList;
}
BufferedReader br = null;
try {
br = new BufferedReader(new InputStreamReader(new FileInputStream(filename), StandardCharsets.UTF_8));
String line = null;
while ((line = br.readLine()) != null) {
lineList.add(line);
}
} catch (Exception e) {
logger.error(String.format("read file: %s failed : ",filename),e);
} finally {
if(br != null){
try {
br.close();
} catch (IOException e) {
logger.error(e.getMessage(),e);
}
}
}
return lineList;
}
/**
* find app id
* @param line line
* @return appid
*/
private String findAppId(String line) {
Matcher matcher = APPLICATION_REGEX.matcher(line);
if (matcher.find()) {
return matcher.group();
}
return null;
}
/**
* get remain time(s)
*
* @return remain time
*/
private long getRemainTime() {
long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000;
long remainTime = taskExecutionContext.getTaskTimeout() - usedTime;
if (remainTime < 0) {
throw new RuntimeException("task execution time out");
}
return remainTime;
}
/**
* get process id
*
* @param process process
* @return process id
*/
private int getProcessId(Process process) {
int processId = 0;
try {
Field f = process.getClass().getDeclaredField(Constants.PID);
f.setAccessible(true);
processId = f.getInt(process);
} catch (Throwable e) {
logger.error(e.getMessage(), e);
}
return processId;
}
/**
* when the log buffer size or the flush interval reaches its threshold, flush
*
* @param lastFlushTime last flush time
* @return last flush time
*/
private long flush(long lastFlushTime) {
long now = System.currentTimeMillis();
/**
* when the log buffer size or the flush interval reaches its threshold, flush
*/
if (logBuffer.size() >= Constants.DEFAULT_LOG_ROWS_NUM || now - lastFlushTime > Constants.DEFAULT_LOG_FLUSH_INTERVAL) {
lastFlushTime = now;
/** log handle */
logHandler.accept(logBuffer);
logBuffer.clear();
}
return lastFlushTime;
}
/**
* close buffer reader
*
* @param inReader in reader
*/
private void close(BufferedReader inReader) {
if (inReader != null) {
try {
inReader.close();
} catch (IOException e) {
logger.error(e.getMessage(), e);
}
}
}
protected List<String> commandOptions() {
return Collections.emptyList();
}
protected abstract String buildCommandFilePath();
protected abstract String commandInterpreter();
protected abstract void createCommandFileIfNotExists(String execCommand, String commandFile) throws IOException;
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throws NullPointerException | 1. When a worker is running more than 100 tasks, some tasks are still waiting in the queue.
At that point the master sends a kill command to this worker; TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext).
But the taskInstance is not running on this worker yet, it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside the method, so TaskKillProcessor continues and builds the response command. In the method "buildKillTaskResponseCommand" it reads the taskInstanceId from the cache (TaskExecutionContext), but the cache entry is null, so the command is sent to the master with a null taskInstanceId (a test sketch for this cache-miss path follows this row). | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/cache/TaskExecutionContextCacheManagerTest.java |
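The updated TaskExecutionContextCacheManagerTest in the row above has no content in this dump. As an illustration only, here is a minimal sketch of what a regression test for the cache-miss path could look like. It assumes the worker-side cache API implied by the code in this dump (cacheTaskExecutionContext as seen in AbstractCommandExecutor, plus getByTaskInstanceId / removeByTaskInstanceId on TaskExecutionContextCacheManagerImpl); it is not the test added by PR #4182.

```java
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl;

import org.junit.Assert;
import org.junit.Test;

public class TaskExecutionContextCacheMissSketchTest {

    @Test
    public void queuedTaskHasNoCachedContext() {
        TaskExecutionContextCacheManagerImpl cacheManager = new TaskExecutionContextCacheManagerImpl();
        // a task that is queued but has not started on this worker was never cached,
        // so the lookup must return null instead of a usable context
        Assert.assertNull(cacheManager.getByTaskInstanceId(12345));
    }

    @Test
    public void cachedContextCanBeFetchedAndRemoved() {
        TaskExecutionContextCacheManagerImpl cacheManager = new TaskExecutionContextCacheManagerImpl();
        TaskExecutionContext context = new TaskExecutionContext();
        context.setTaskInstanceId(1);
        cacheManager.cacheTaskExecutionContext(context);
        Assert.assertNotNull(cacheManager.getByTaskInstanceId(1));
        cacheManager.removeByTaskInstanceId(1);
        Assert.assertNull(cacheManager.getByTaskInstanceId(1));
    }
}
```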
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throws NullPointerException | 1. When a worker is running more than 100 tasks, some tasks are still waiting in the queue.
At that point the master sends a kill command to this worker; TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext).
But the taskInstance is not running on this worker yet, it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside the method, so TaskKillProcessor continues and builds the response command. In the method "buildKillTaskResponseCommand" it reads the taskInstanceId from the cache (TaskExecutionContext), but the cache entry is null, so the command is sent to the master with a null taskInstanceId (see the test sketch above). | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskCallbackServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.processor;
import java.util.Date;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.remote.NettyRemotingClient;
import org.apache.dolphinscheduler.remote.NettyRemotingServer;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand;
import org.apache.dolphinscheduler.remote.config.NettyClientConfig;
import org.apache.dolphinscheduler.remote.config.NettyServerConfig;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.processor.TaskAckProcessor;
import org.apache.dolphinscheduler.server.master.processor.TaskResponseProcessor;
import org.apache.dolphinscheduler.server.master.processor.queue.TaskResponseService;
import org.apache.dolphinscheduler.server.master.registry.MasterRegistry;
import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import io.netty.channel.Channel;
/**
* test task call back service
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={
TaskCallbackServiceTestConfig.class,
SpringZKServer.class,
SpringApplicationContext.class,
MasterRegistry.class,
WorkerRegistry.class,
ZookeeperRegistryCenter.class,
MasterConfig.class,
WorkerConfig.class,
ZookeeperCachedOperator.class,
ZookeeperConfig.class,
ZookeeperNodeManager.class,
TaskCallbackService.class,
TaskResponseService.class,
TaskAckProcessor.class,
TaskResponseProcessor.class,
TaskExecuteProcessor.class,
CuratorZookeeperClient.class})
public class TaskCallbackServiceTest {
@Autowired
private TaskCallbackService taskCallbackService;
@Autowired
private MasterRegistry masterRegistry;
@Autowired
private TaskAckProcessor taskAckProcessor;
@Autowired
private TaskResponseProcessor taskResponseProcessor;
@Autowired
private TaskExecuteProcessor taskExecuteProcessor;
/**
* send ack test
* @throws Exception
*/
@Test
public void testSendAck() throws Exception{
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(1);
ackCommand.setStartTime(new Date());
taskCallbackService.sendAck(1, ackCommand.convert2Command());
Stopper.stop();
nettyRemotingServer.close();
nettyRemotingClient.close();
}
/**
* send result test
* @throws Exception
*/
@Test
public void testSendResult() throws Exception{
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_RESPONSE, taskResponseProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
TaskExecuteResponseCommand responseCommand = new TaskExecuteResponseCommand();
responseCommand.setTaskInstanceId(1);
responseCommand.setEndTime(new Date());
taskCallbackService.sendResult(1, responseCommand.convert2Command());
Thread.sleep(5000);
Stopper.stop();
Thread.sleep(5000);
nettyRemotingServer.close();
nettyRemotingClient.close();
}
// @Test(expected = IllegalArgumentException.class)
// public void testSendAckWithIllegalArgumentException(){
// TaskExecuteAckCommand ackCommand = Mockito.mock(TaskExecuteAckCommand.class);
// taskCallbackService.sendAck(1, ackCommand.convert2Command());
// Stopper.stop();
// }
@Test
public void testPause(){
Assert.assertEquals(5000, taskCallbackService.pause(3));
}
@Test
public void testSendAck1(){
masterRegistry.registry();
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
// channel.close();
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(1);
ackCommand.setStartTime(new Date());
taskCallbackService.sendAck(1, ackCommand.convert2Command());
Assert.assertEquals(true, channel.isOpen());
Stopper.stop();
nettyRemotingServer.close();
nettyRemotingClient.close();
masterRegistry.unRegistry();
}
@Test
public void testTaskExecuteProcessor() throws Exception{
final NettyServerConfig serverConfig = new NettyServerConfig();
serverConfig.setListenPort(30000);
NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_REQUEST, taskExecuteProcessor);
nettyRemotingServer.start();
final NettyClientConfig clientConfig = new NettyClientConfig();
NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
TaskExecuteRequestCommand taskExecuteRequestCommand = new TaskExecuteRequestCommand();
nettyRemotingClient.send(new Host("localhost",30000),taskExecuteRequestCommand.convert2Command());
taskExecuteRequestCommand.setTaskExecutionContext(JSONUtils.toJsonString(new TaskExecutionContext()));
nettyRemotingClient.send(new Host("localhost",30000),taskExecuteRequestCommand.convert2Command());
Thread.sleep(5000);
Stopper.stop();
Thread.sleep(5000);
nettyRemotingServer.close();
nettyRemotingClient.close();
}
// @Test(expected = IllegalStateException.class)
// public void testSendAckWithIllegalStateException2(){
// masterRegistry.registry();
// final NettyServerConfig serverConfig = new NettyServerConfig();
// serverConfig.setListenPort(30000);
// NettyRemotingServer nettyRemotingServer = new NettyRemotingServer(serverConfig);
// nettyRemotingServer.registerProcessor(CommandType.TASK_EXECUTE_ACK, taskAckProcessor);
// nettyRemotingServer.start();
//
// final NettyClientConfig clientConfig = new NettyClientConfig();
// NettyRemotingClient nettyRemotingClient = new NettyRemotingClient(clientConfig);
// Channel channel = nettyRemotingClient.getChannel(Host.of("localhost:30000"));
// taskCallbackService.addRemoteChannel(1, new NettyRemoteChannel(channel, 1));
// channel.close();
// TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
// ackCommand.setTaskInstanceId(1);
// ackCommand.setStartTime(new Date());
//
// nettyRemotingServer.close();
//
// taskCallbackService.sendAck(1, ackCommand.convert2Command());
// try {
// Thread.sleep(5000);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
//
// Stopper.stop();
//
// try {
// Thread.sleep(5000);
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// }
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,172 | TaskKillProcessor throws NullPointerException | 1. When a worker is running more than 100 tasks, some tasks are still waiting in the queue.
At that point the master sends a kill command to this worker; TaskKillProcessor receives it and looks up the processId in the cache (TaskExecutionContext).
But the taskInstance is not running on this worker yet, it is still in the queue, so TaskKillProcessor throws a NullPointerException.
2. This exception is caught inside the method, so TaskKillProcessor continues and builds the response command. In the method "buildKillTaskResponseCommand" it reads the taskInstanceId from the cache (TaskExecutionContext), but the cache entry is null, so the command is sent to the master with a null taskInstanceId (a defensive guard sketch follows this row). | https://github.com/apache/dolphinscheduler/issues/4172 | https://github.com/apache/dolphinscheduler/pull/4182 | b3120a74d2656f7ad2054ba8245262551063b549 | a13e737eb472b21b6b4c596ede9afc80fa3beb06 | "2020-12-07T08:19:05Z" | java | "2020-12-11T07:41:35Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/processor/TaskKillProcessorTest.java |
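The TaskKillProcessorTest stub in the row above is also empty here. The direction the issue points at can be sketched as a null guard on the kill path — a minimal sketch only, not the actual patch from PR #4182; the helper class shape and the -1 sentinel are assumptions, while the cache lookup mirrors the cacheTaskExecutionContext call visible in AbstractCommandExecutor above.

```java
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Hypothetical helper: resolve the process id for a kill request while
 * guarding against tasks that are still queued and have no cached context.
 */
public class KillRequestGuard {

    private static final Logger logger = LoggerFactory.getLogger(KillRequestGuard.class);

    private final TaskExecutionContextCacheManager cacheManager;

    public KillRequestGuard(TaskExecutionContextCacheManager cacheManager) {
        this.cacheManager = cacheManager;
    }

    /**
     * @return the OS process id to kill, or -1 when the task never started on this worker
     */
    public int resolveProcessId(int taskInstanceId) {
        TaskExecutionContext context = cacheManager.getByTaskInstanceId(taskInstanceId);
        if (context == null) {
            // the task is still in the queue: there is nothing to kill, and building
            // a response from a null context would send a null taskInstanceId back
            logger.warn("task instance {} is not running on this worker, kill skipped", taskInstanceId);
            return -1;
        }
        return context.getProcessId();
    }
}
```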
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,215 | [improvement] ProcessService invalid code removal | **Describe the question**
The method org.apache.dolphinscheduler.service.process.ProcessService#checkTaskExistsInTaskQueue always returns false.
**Which version of DolphinScheduler:**
1.1.3
**Additional context**
![image](https://user-images.githubusercontent.com/41229600/101970917-6002e580-3c68-11eb-84c1-135b943ec38b.png)
**Requirement or improvement**
If the method always returns a constant value, is the surrounding business logic actually complete? | https://github.com/apache/dolphinscheduler/issues/4215 | https://github.com/apache/dolphinscheduler/pull/4221 | b694474c78cafb298dd41d03cc0c54d2179dde2a | 426eb9af034ff481f22d5da0a03662f6c9c7e32c | "2020-12-12T02:53:50Z" | java | "2020-12-13T15:05:27Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java |
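To make the constant-return problem concrete, this is the rough shape of the dead method as the issue describes it (a paraphrase of the screenshot above, not the exact source): once the task-queue lookup was removed, the body collapsed to a constant, so every caller branch that tests for "task exists in queue" is unreachable, and the linked PR (#4221) removes the method.

```java
// paraphrased shape of the invalid method flagged by the issue: the real
// queue lookup is gone, so the result is constant no matter the input
public boolean checkTaskExistsInTaskQueue(TaskInstance taskInstance) {
    if (taskInstance.isSubProcess()) {
        return false;
    }
    return false; // always false, so callers' "exists" branches are dead code
}
```

/*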
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.process;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID;
import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS;
import static java.util.stream.Collectors.toSet;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.CycleDependency;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ErrorCommand;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import com.cronutils.model.Cron;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* process relative dao that some mappers in this.
*/
@Component
public class ProcessService {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final int[] stateArray = new int[] {ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private ProcessInstanceMapMapper processInstanceMapMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectMapper projectMapper;
/**
* handle Command (construct ProcessInstance from Command) , wrapped in transaction
*
* @param logger logger
* @param host host
* @param validThreadNum validThreadNum
* @param command found command
* @return process instance
*/
@Transactional(rollbackFor = Exception.class)
public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) {
ProcessInstance processInstance = constructProcessInstance(command, host);
//cannot construct process instance, return null;
if (processInstance == null) {
logger.error("scan command, command parameter is error: {}", command);
moveToErrorCommand(command, "process instance is null");
return null;
}
if (!checkThreadNum(command, validThreadNum)) {
logger.info("there is not enough thread for this command: {}", command);
return setWaitingThreadProcess(command, processInstance);
}
processInstance.setCommandType(command.getCommandType());
processInstance.addHistoryCmd(command.getCommandType());
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
delCommandById(command.getId());
return processInstance;
}
/**
* save error command, and delete original command
*
* @param command command
* @param message message
*/
@Transactional(rollbackFor = Exception.class)
public void moveToErrorCommand(Command command, String message) {
ErrorCommand errorCommand = new ErrorCommand(command, message);
this.errorCommandMapper.insert(errorCommand);
delCommandById(command.getId());
}
/**
* set process waiting thread
*
* @param command command
* @param processInstance processInstance
* @return process instance
*/
private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) {
processInstance.setState(ExecutionStatus.WAITTING_THREAD);
if (command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD) {
processInstance.addHistoryCmd(command.getCommandType());
}
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
createRecoveryWaitingThreadCommand(command, processInstance);
return null;
}
/**
* check thread num
*
* @param command command
* @param validThreadNum validThreadNum
* @return if thread is enough
*/
private boolean checkThreadNum(Command command, int validThreadNum) {
int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
return validThreadNum >= commandThreadCount;
}
/**
* insert one command
*
* @param command command
* @return create result
*/
public int createCommand(Command command) {
int result = 0;
if (command != null) {
result = commandMapper.insert(command);
}
return result;
}
/**
* find one command from queue list
*
* @return command
*/
public Command findOneCommand() {
return commandMapper.getOneToRun();
}
/**
* check the input command exists in queue list
*
* @param command command
* @return create command result
*/
public Boolean verifyIsNeedCreateCommand(Command command) {
Boolean isNeedCreate = true;
Map<CommandType, Integer> cmdTypeMap = new HashMap<>();
cmdTypeMap.put(CommandType.REPEAT_RUNNING, 1);
cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, 1);
cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS, 1);
CommandType commandType = command.getCommandType();
if (cmdTypeMap.containsKey(commandType)) {
ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam());
int processInstanceId = cmdParamObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt();
List<Command> commands = commandMapper.selectList(null);
// for all commands
for (Command tmpCommand : commands) {
if (cmdTypeMap.containsKey(tmpCommand.getCommandType())) {
ObjectNode tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam());
if (tempObj != null && processInstanceId == tempObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt()) {
isNeedCreate = false;
break;
}
}
}
}
return isNeedCreate;
}
/**
* find process instance detail by id
*
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceDetailById(int processId) {
return processInstanceMapper.queryDetailById(processId);
}
/**
* get task node list by definitionId
*
* @param defineId process definition id
* @return task node list, or null when the process definition does not exist
*/
public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId) {
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
return null;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
return new ArrayList<>();
}
return processData.getTasks();
}
/**
* find process instance by id
*
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceById(int processId) {
return processInstanceMapper.selectById(processId);
}
/**
* find process define by id.
*
* @param processDefinitionId processDefinitionId
* @return process definition
*/
public ProcessDefinition findProcessDefineById(int processDefinitionId) {
return processDefineMapper.selectById(processDefinitionId);
}
/**
* delete work process instance by id
*
* @param processInstanceId processInstanceId
* @return delete process instance result
*/
public int deleteWorkProcessInstanceById(int processInstanceId) {
return processInstanceMapper.deleteById(processInstanceId);
}
/**
* delete all sub process by parent instance id
*
* @param processInstanceId processInstanceId
* @return delete all sub process instance result
*/
public int deleteAllSubWorkProcessByParentId(int processInstanceId) {
List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId);
for (Integer subId : subProcessIdList) {
deleteAllSubWorkProcessByParentId(subId);
deleteWorkProcessMapByParentId(subId);
removeTaskLogFile(subId);
deleteWorkProcessInstanceById(subId);
}
return 1;
}
/**
* remove task log file
*
* @param processInstanceId processInstanceId
*/
public void removeTaskLogFile(Integer processInstanceId) {
LogClientService logClient = null;
try {
logClient = new LogClientService();
List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
if (CollectionUtils.isEmpty(taskInstanceList)) {
return;
}
for (TaskInstance taskInstance : taskInstanceList) {
String taskLogPath = taskInstance.getLogPath();
if (StringUtils.isEmpty(taskInstance.getHost())) {
continue;
}
int port = Constants.RPC_PORT;
String ip = "";
try {
ip = Host.of(taskInstance.getHost()).getIp();
} catch (Exception e) {
// compatible old version
ip = taskInstance.getHost();
}
// remove task log from loggerserver
logClient.removeTaskLog(ip, port, taskLogPath);
}
} finally {
if (logClient != null) {
logClient.close();
}
}
}
/**
* calculate sub process number in the process define.
*
* @param processDefinitionId processDefinitionId
* @return process thread num count
*/
private Integer workProcessThreadNumCount(Integer processDefinitionId) {
List<Integer> ids = new ArrayList<>();
recurseFindSubProcessId(processDefinitionId, ids);
return ids.size() + 1;
}
/**
* recursive query sub process definition id by parent id.
*
* @param parentId parentId
* @param ids ids
*/
public void recurseFindSubProcessId(int parentId, List<Integer> ids) {
ProcessDefinition processDefinition = processDefineMapper.selectById(parentId);
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
if (taskNodeList != null && taskNodeList.size() > 0) {
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
ObjectNode parameterJson = JSONUtils.parseObject(parameter);
if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) {
SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids);
}
}
}
}
/**
* create recovery waiting thread command when thread pool is not enough for the process instance.
* sub work process instance need not to create recovery command.
* create recovery waiting thread command and delete origin command at the same time.
* if the recovery command is exists, only update the field update_time
*
* @param originCommand originCommand
* @param processInstance processInstance
*/
public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) {
// a sub process does not need to create a wait command
if (processInstance.getIsSubProcess() == Flag.YES) {
if (originCommand != null) {
commandMapper.deleteById(originCommand.getId());
}
return;
}
Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, String.valueOf(processInstance.getId()));
// process instance quit by "waiting thread" state
if (originCommand == null) {
Command command = new Command(
CommandType.RECOVER_WAITTING_THREAD,
processInstance.getTaskDependType(),
processInstance.getFailureStrategy(),
processInstance.getExecutorId(),
processInstance.getProcessDefinitionId(),
JSONUtils.toJsonString(cmdParam),
processInstance.getWarningType(),
processInstance.getWarningGroupId(),
processInstance.getScheduleTime(),
processInstance.getWorkerGroup(),
processInstance.getProcessInstancePriority()
);
saveCommand(command);
return;
}
// update the command time if current command if recover from waiting
if (originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD) {
originCommand.setUpdateTime(new Date());
saveCommand(originCommand);
} else {
// delete old command and create new waiting thread command
commandMapper.deleteById(originCommand.getId());
originCommand.setId(0);
originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
originCommand.setUpdateTime(new Date());
originCommand.setCommandParam(JSONUtils.toJsonString(cmdParam));
originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority());
saveCommand(originCommand);
}
}
/**
* get schedule time from command
*
* @param command command
* @param cmdParam cmdParam map
* @return date
*/
private Date getScheduleTime(Command command, Map<String, String> cmdParam) {
Date scheduleTime = command.getScheduleTime();
if (scheduleTime == null) {
if (cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)) {
scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
}
}
return scheduleTime;
}
/**
* generate a new work process instance from command.
*
* @param processDefinition processDefinition
* @param command command
* @param cmdParam cmdParam map
* @return process instance
*/
private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition,
Command command,
Map<String, String> cmdParam) {
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
processInstance.setRecovery(Flag.NO);
processInstance.setStartTime(new Date());
processInstance.setRunTimes(1);
processInstance.setMaxTryTimes(0);
processInstance.setProcessDefinitionId(command.getProcessDefinitionId());
processInstance.setCommandParam(command.getCommandParam());
processInstance.setCommandType(command.getCommandType());
processInstance.setIsSubProcess(Flag.NO);
processInstance.setTaskDependType(command.getTaskDependType());
processInstance.setFailureStrategy(command.getFailureStrategy());
processInstance.setExecutorId(command.getExecutorId());
WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType();
processInstance.setWarningType(warningType);
Integer warningGroupId = command.getWarningGroupId() == null ? 0 : command.getWarningGroupId();
processInstance.setWarningGroupId(warningGroupId);
// schedule time
Date scheduleTime = getScheduleTime(command, cmdParam);
if (scheduleTime != null) {
processInstance.setScheduleTime(scheduleTime);
}
processInstance.setCommandStartTime(command.getStartTime());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setConnects(processDefinition.getConnects());
// curing global params
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
//copy process define json to process instance
processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson());
// set process instance priority
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup();
processInstance.setWorkerGroup(workerGroup);
processInstance.setTimeout(processDefinition.getTimeout());
processInstance.setTenantId(processDefinition.getTenantId());
return processInstance;
}
/**
* get process tenant
* if there is a tenant id in the definition, use the tenant of the definition.
* if there is no tenant id in the definition, or the tenant does not exist,
* use the definition creator's tenant.
*
* @param tenantId tenantId
* @param userId userId
* @return tenant
*/
public Tenant getTenantForProcess(int tenantId, int userId) {
Tenant tenant = null;
if (tenantId >= 0) {
tenant = tenantMapper.queryById(tenantId);
}
if (userId == 0) {
return null;
}
if (tenant == null) {
User user = userMapper.selectById(userId);
tenant = tenantMapper.queryById(user.getTenantId());
}
return tenant;
}
/**
* check command parameters is valid
*
* @param command command
* @param cmdParam cmdParam map
* @return whether command param is valid
*/
private Boolean checkCmdParam(Command command, Map<String, String> cmdParam) {
if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) {
if (cmdParam == null
|| !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES)
|| cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) {
logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType());
return false;
}
}
return true;
}
/**
* construct process instance according to one command.
*
* @param command command
* @param host host
* @return process instance
*/
private ProcessInstance constructProcessInstance(Command command, String host) {
ProcessInstance processInstance = null;
CommandType commandType = command.getCommandType();
Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam());
ProcessDefinition processDefinition = null;
if (command.getProcessDefinitionId() != 0) {
processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId());
if (processDefinition == null) {
logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId());
return null;
}
}
if (cmdParam != null) {
Integer processInstanceId = 0;
// recover from failure or pause tasks
if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) {
String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING);
processInstanceId = Integer.parseInt(processId);
if (processInstanceId == 0) {
logger.error("command parameter is error, [ ProcessInstanceId ] is 0");
return null;
}
} else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) {
// sub process map
String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS);
processInstanceId = Integer.parseInt(pId);
} else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) {
// waiting thread command
String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD);
processInstanceId = Integer.parseInt(pId);
}
if (processInstanceId == 0) {
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
} else {
processInstance = this.findProcessInstanceDetailById(processInstanceId);
}
processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId());
processInstance.setProcessDefinition(processDefinition);
//reset command parameter
if (processInstance.getCommandParam() != null) {
Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam());
for (Map.Entry<String, String> entry : processCmdParam.entrySet()) {
if (!cmdParam.containsKey(entry.getKey())) {
cmdParam.put(entry.getKey(), entry.getValue());
}
}
}
// reset command parameter if sub process
if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) {
processInstance.setCommandParam(command.getCommandParam());
}
} else {
// generate one new process instance
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}
if (!checkCmdParam(command, cmdParam)) {
logger.error("command parameter check failed!");
return null;
}
if (command.getScheduleTime() != null) {
processInstance.setScheduleTime(command.getScheduleTime());
}
processInstance.setHost(host);
ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXECUTION;
int runTime = processInstance.getRunTimes();
switch (commandType) {
case START_PROCESS:
break;
case START_FAILURE_TASK_PROCESS:
// find failed tasks and init these tasks
List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE);
List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE);
List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL);
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
failedList.addAll(killedList);
failedList.addAll(toleranceList);
for (Integer taskId : failedList) {
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING,
String.join(Constants.COMMA, convertIntListToString(failedList)));
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
processInstance.setRunTimes(runTime + 1);
break;
case START_CURRENT_TASK_PROCESS:
break;
case RECOVER_WAITTING_THREAD:
break;
case RECOVER_SUSPENDED_PROCESS:
// find pause tasks and init task's state
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE);
List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(),
ExecutionStatus.KILL);
suspendedNodeList.addAll(stopNodeList);
for (Integer taskId : suspendedNodeList) {
// initialize the pause state
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList)));
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
processInstance.setRunTimes(runTime + 1);
break;
case RECOVER_TOLERANCE_FAULT_PROCESS:
// recover tolerance fault process
processInstance.setRecovery(Flag.YES);
runStatus = processInstance.getState();
break;
case COMPLEMENT_DATA:
// delete all the valid tasks when complement data
List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId());
for (TaskInstance taskInstance : taskInstanceList) {
taskInstance.setFlag(Flag.NO);
this.updateTaskInstance(taskInstance);
}
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case REPEAT_RUNNING:
// delete the recover task names from command parameter
if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) {
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
}
// delete all the valid tasks when repeat running
List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId());
for (TaskInstance taskInstance : validTaskList) {
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
}
processInstance.setStartTime(new Date());
processInstance.setEndTime(null);
processInstance.setRunTimes(runTime + 1);
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case SCHEDULER:
break;
default:
break;
}
processInstance.setState(runStatus);
return processInstance;
}
/**
* return complement data if the process start with complement data
*
* @param processInstance processInstance
* @param command command
* @return command type
*/
private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) {
if (CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()) {
return CommandType.COMPLEMENT_DATA;
} else {
return command.getCommandType();
}
}
/**
* initialize complement data parameters
*
* @param processDefinition processDefinition
* @param processInstance processInstance
* @param cmdParam cmdParam
*/
private void initComplementDataParam(ProcessDefinition processDefinition,
ProcessInstance processInstance,
Map<String, String> cmdParam) {
if (!processInstance.isComplementData()) {
return;
}
Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE),
YYYY_MM_DD_HH_MM_SS);
if (Flag.NO == processInstance.getIsSubProcess()) {
processInstance.setScheduleTime(startComplementTime);
}
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
}
/**
* set sub work process parameters.
* handle sub work process instance, update relation table and command parameters
* set sub work process flag, extends parent work process command parameters
*
* @param subProcessInstance subProcessInstance
* @return process instance
*/
public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance) {
String cmdParam = subProcessInstance.getCommandParam();
if (StringUtils.isEmpty(cmdParam)) {
return subProcessInstance;
}
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
// write sub process id into cmd param.
if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS)
&& CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) {
paramMap.remove(CMD_PARAM_SUB_PROCESS);
paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId()));
subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap));
subProcessInstance.setIsSubProcess(Flag.YES);
this.saveProcessInstance(subProcessInstance);
}
// copy parent instance user-defined params to the sub process.
String parentInstanceId = paramMap.get(CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID);
if (StringUtils.isNotEmpty(parentInstanceId)) {
ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId));
if (parentInstance != null) {
subProcessInstance.setGlobalParams(
joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams()));
this.saveProcessInstance(subProcessInstance);
} else {
logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam);
}
}
ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class);
if (processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0) {
return subProcessInstance;
}
// update sub process id to process map table
processInstanceMap.setProcessInstanceId(subProcessInstance.getId());
this.updateWorkProcessInstanceMap(processInstanceMap);
return subProcessInstance;
}
/**
* join parent global params into sub process.
* only the keys that are not already in the sub process global params are joined.
*
* @param parentGlobalParams parentGlobalParams
* @param subGlobalParams subGlobalParams
* @return global params join
*/
private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) {
List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class);
List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class);
Map<String, String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
for (Property parent : parentPropertyList) {
if (!subMap.containsKey(parent.getProp())) {
subPropertyList.add(parent);
}
}
return JSONUtils.toJsonString(subPropertyList);
}
/**
* initialize task instance
*
* @param taskInstance taskInstance
*/
private void initTaskInstance(TaskInstance taskInstance) {
if (!taskInstance.isSubProcess()) {
if (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure()) {
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
return;
}
}
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateTaskInstance(taskInstance);
}
/**
* submit task to db
* submit sub process to command
*
* @param taskInstance taskInstance
* @return task instance
*/
@Transactional(rollbackFor = Exception.class)
public TaskInstance submitTask(TaskInstance taskInstance) {
ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
logger.info("start submit task : {}, instance id:{}, state: {}",
taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState());
//submit to db
TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance);
if (task == null) {
logger.error("end submit task to db error, task name:{}, process id:{} state: {} ",
taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState());
return task;
}
if (!task.getState().typeIsFinished()) {
createSubWorkProcess(processInstance, task);
}
logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ",
taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState());
return task;
}
/**
* set work process instance map
* on repeat running, a new sub process instance is not generated:
* set map {parent instance id, task instance id, 0(child instance id)}
*
* @param parentInstance parentInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) {
ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId());
if (processMap != null) {
return processMap;
}
if (parentInstance.getCommandType() == CommandType.REPEAT_RUNNING) {
// update current task id to map
processMap = findPreviousTaskProcessMap(parentInstance, parentTask);
if (processMap != null) {
processMap.setParentTaskInstanceId(parentTask.getId());
updateWorkProcessInstanceMap(processMap);
return processMap;
}
}
// new task
processMap = new ProcessInstanceMap();
processMap.setParentProcessInstanceId(parentInstance.getId());
processMap.setParentTaskInstanceId(parentTask.getId());
createWorkProcessInstanceMap(processMap);
return processMap;
}
/**
* find previous task work process map.
*
* @param parentProcessInstance parentProcessInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance,
TaskInstance parentTask) {
Integer preTaskId = 0;
List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId());
for (TaskInstance task : preTaskList) {
if (task.getName().equals(parentTask.getName())) {
preTaskId = task.getId();
ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId);
if (map != null) {
return map;
}
}
}
logger.info("sub process instance is not found,parent task:{},parent instance:{}",
parentTask.getId(), parentProcessInstance.getId());
return null;
}
/**
* create sub work process command
*
* @param parentProcessInstance parentProcessInstance
* @param task task
*/
public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) {
if (!task.isSubProcess()) {
return;
}
//check create sub work flow firstly
ProcessInstanceMap instanceMap = findWorkProcessMapByParent(parentProcessInstance.getId(), task.getId());
if (null != instanceMap && CommandType.RECOVER_TOLERANCE_FAULT_PROCESS == parentProcessInstance.getCommandType()) {
// fault-tolerance recovery does not create a new command when the sub command has already been created
return;
}
instanceMap = setProcessInstanceMap(parentProcessInstance, task);
ProcessInstance childInstance = null;
if (instanceMap.getProcessInstanceId() != 0) {
childInstance = findProcessInstanceById(instanceMap.getProcessInstanceId());
}
Command subProcessCommand = createSubProcessCommand(parentProcessInstance, childInstance, instanceMap, task);
updateSubProcessDefinitionByParent(parentProcessInstance, subProcessCommand.getProcessDefinitionId());
initSubInstanceState(childInstance);
createCommand(subProcessCommand);
logger.info("sub process command created: {} ", subProcessCommand);
}
/**
* complement data needs transform parent parameter to child.
*
     * @param instanceMap process instance map
     * @param parentProcessInstance parent process instance
     * @return sub work flow command parameter json
*/
private String getSubWorkFlowParam(ProcessInstanceMap instanceMap, ProcessInstance parentProcessInstance) {
// set sub work process command
String processMapStr = JSONUtils.toJsonString(instanceMap);
Map<String, String> cmdParam = JSONUtils.toMap(processMapStr);
if (parentProcessInstance.isComplementData()) {
Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam());
String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE);
String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime);
processMapStr = JSONUtils.toJsonString(cmdParam);
}
return processMapStr;
}
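    // illustrative example (hypothetical values, not taken from the source): for a
    // complement-data parent whose commandParam carries complement start/end dates of
    // 2020-01-01 and 2020-01-02, the returned json is the serialized instance map plus
    // both complement dates, so the child instance complements the same date range.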
/**
* create sub work process command
*
* @param parentProcessInstance
* @param childInstance
* @param instanceMap
* @param task
*/
public Command createSubProcessCommand(ProcessInstance parentProcessInstance,
ProcessInstance childInstance,
ProcessInstanceMap instanceMap,
TaskInstance task) {
CommandType commandType = getSubCommandType(parentProcessInstance, childInstance);
TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class);
Map<String, String> subProcessParam = JSONUtils.toMap(taskNode.getParams());
Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID));
String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance);
return new Command(
commandType,
TaskDependType.TASK_POST,
parentProcessInstance.getFailureStrategy(),
parentProcessInstance.getExecutorId(),
childDefineId,
processParam,
parentProcessInstance.getWarningType(),
parentProcessInstance.getWarningGroupId(),
parentProcessInstance.getScheduleTime(),
task.getWorkerGroup(),
parentProcessInstance.getProcessInstancePriority()
);
}
/**
* initialize sub work flow state
* child instance state would be initialized when 'recovery from pause/stop/failure'
*
* @param childInstance
*/
private void initSubInstanceState(ProcessInstance childInstance) {
if (childInstance != null) {
childInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
updateProcessInstance(childInstance);
}
}
/**
* get sub work flow command type
* child instance exist: child command = fatherCommand
* child instance not exists: child command = fatherCommand[0]
*
     * @param parentProcessInstance parent process instance
     * @param childInstance child process instance, may be null
     * @return sub work flow command type
*/
private CommandType getSubCommandType(ProcessInstance parentProcessInstance, ProcessInstance childInstance) {
CommandType commandType = parentProcessInstance.getCommandType();
if (childInstance == null) {
String fatherHistoryCommand = parentProcessInstance.getHistoryCmd();
commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]);
}
return commandType;
}
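    // illustrative example: with historyCmd "START_PROCESS,REPEAT_RUNNING" and no child
    // instance yet, the sub command falls back to the first recorded command
    // (START_PROCESS); with an existing child instance the parent's current command
    // type is reused as-is.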
/**
* update sub process definition
*
* @param parentProcessInstance parentProcessInstance
* @param childDefinitionId childDefinitionId
*/
private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) {
ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId());
ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId);
if (childDefinition != null && fatherDefinition != null) {
childDefinition.setReceivers(fatherDefinition.getReceivers());
childDefinition.setReceiversCc(fatherDefinition.getReceiversCc());
processDefineMapper.updateById(childDefinition);
}
}
/**
* submit task to mysql
*
* @param taskInstance taskInstance
* @param processInstance processInstance
* @return task instance
*/
public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) {
ExecutionStatus processInstanceState = processInstance.getState();
if (taskInstance.getState().typeIsFailure()) {
if (taskInstance.isSubProcess()) {
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1);
} else {
if (processInstanceState != ExecutionStatus.READY_STOP
&& processInstanceState != ExecutionStatus.READY_PAUSE) {
// failure task set invalid
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
                    // create a new task instance
if (taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) {
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1);
}
taskInstance.setSubmitTime(null);
taskInstance.setStartTime(null);
taskInstance.setEndTime(null);
taskInstance.setFlag(Flag.YES);
taskInstance.setHost(null);
taskInstance.setId(0);
}
}
}
taskInstance.setExecutorId(processInstance.getExecutorId());
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority());
taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState));
if (taskInstance.getSubmitTime() == null) {
taskInstance.setSubmitTime(new Date());
}
if (taskInstance.getFirstSubmitTime() == null) {
taskInstance.setFirstSubmitTime(taskInstance.getSubmitTime());
}
boolean saveResult = saveTaskInstance(taskInstance);
if (!saveResult) {
return null;
}
return taskInstance;
}
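    // illustrative example: a FAILURE task under a running (not READY_STOP/READY_PAUSE)
    // process instance is flagged invalid (Flag.NO) and persisted, then re-submitted as
    // a fresh row (id=0, retryTimes+1, Flag.YES), so the retry becomes a new task
    // instance while the failed attempt is kept for history.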
/**
* ${processInstancePriority}_${processInstanceId}_${taskInstancePriority}_${taskInstanceId}_${task executed by ip1},${ip2}...
* The tasks with the highest priority are selected by comparing the priorities of the above four levels from high to low.
*
* @param taskInstance taskInstance
* @return task zk queue str
*/
public String taskZkInfo(TaskInstance taskInstance) {
String taskWorkerGroup = getTaskWorkerGroup(taskInstance);
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if (processInstance == null) {
logger.error("process instance is null. please check the task info, task id: " + taskInstance.getId());
return "";
}
StringBuilder sb = new StringBuilder(100);
sb.append(processInstance.getProcessInstancePriority().ordinal()).append(Constants.UNDERLINE)
.append(taskInstance.getProcessInstanceId()).append(Constants.UNDERLINE)
.append(taskInstance.getTaskInstancePriority().ordinal()).append(Constants.UNDERLINE)
.append(taskInstance.getId()).append(Constants.UNDERLINE)
.append(taskInstance.getWorkerGroup());
return sb.toString();
}
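    // illustrative example (hypothetical ids): a MEDIUM-priority (ordinal 2) process
    // instance 1001 holding a HIGH-priority (ordinal 1) task 2001 on worker group
    // "default" yields "2_1001_1_2001_default".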
/**
     * get submit task instance state by the work process state
     * cannot modify the task state when it is running/killed/submitted successfully,
     * or when this task instance already exists in the task queue.
* return pause if work process state is ready pause
* return stop if work process state is ready stop
* if all of above are not satisfied, return submit success
*
* @param taskInstance taskInstance
* @param processInstanceState processInstanceState
* @return process instance state
*/
public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState) {
ExecutionStatus state = taskInstance.getState();
if (
// running, delayed or killed
// the task already exists in task queue
// return state
state == ExecutionStatus.RUNNING_EXECUTION
|| state == ExecutionStatus.DELAY_EXECUTION
|| state == ExecutionStatus.KILL
|| checkTaskExistsInTaskQueue(taskInstance)
) {
return state;
}
        // return pause/stop if the process instance state is ready pause/stop,
        // or return submit success
if (processInstanceState == ExecutionStatus.READY_PAUSE) {
state = ExecutionStatus.PAUSE;
} else if (processInstanceState == ExecutionStatus.READY_STOP
|| !checkProcessStrategy(taskInstance)) {
state = ExecutionStatus.KILL;
} else {
state = ExecutionStatus.SUBMITTED_SUCCESS;
}
return state;
}
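    // illustrative example: a task already in RUNNING_EXECUTION (or found in the queue)
    // keeps its state; a new task under a READY_PAUSE instance is submitted as PAUSE,
    // under READY_STOP (or with a failed sibling and a non-CONTINUE failure strategy)
    // as KILL, and otherwise as SUBMITTED_SUCCESS.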
/**
* check process instance strategy
*
* @param taskInstance taskInstance
* @return check strategy result
*/
private boolean checkProcessStrategy(TaskInstance taskInstance) {
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
FailureStrategy failureStrategy = processInstance.getFailureStrategy();
if (failureStrategy == FailureStrategy.CONTINUE) {
return true;
}
List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId());
for (TaskInstance task : taskInstances) {
if (task.getState() == ExecutionStatus.FAILURE) {
return false;
}
}
return true;
}
/**
     * check whether the task instance exists in the task queue
     *
     * @param taskInstance taskInstance
     * @return whether the task instance exists in the queue
*/
public boolean checkTaskExistsInTaskQueue(TaskInstance taskInstance) {
if (taskInstance.isSubProcess()) {
return false;
}
        // note: the task queue lookup has been removed and the zk info is no longer
        // consulted, so the task is always reported as absent from the queue
        return false;
}
/**
* create a new process instance
*
* @param processInstance processInstance
*/
public void createProcessInstance(ProcessInstance processInstance) {
if (processInstance != null) {
processInstanceMapper.insert(processInstance);
}
}
/**
* insert or update work process instance to data base
*
* @param processInstance processInstance
*/
public void saveProcessInstance(ProcessInstance processInstance) {
if (processInstance == null) {
logger.error("save error, process instance is null!");
return;
}
if (processInstance.getId() != 0) {
processInstanceMapper.updateById(processInstance);
} else {
createProcessInstance(processInstance);
}
}
/**
* insert or update command
*
* @param command command
* @return save command result
*/
public int saveCommand(Command command) {
if (command.getId() != 0) {
return commandMapper.updateById(command);
} else {
return commandMapper.insert(command);
}
}
/**
* insert or update task instance
*
* @param taskInstance taskInstance
* @return save task instance result
*/
public boolean saveTaskInstance(TaskInstance taskInstance) {
if (taskInstance.getId() != 0) {
return updateTaskInstance(taskInstance);
} else {
return createTaskInstance(taskInstance);
}
}
/**
* insert task instance
*
* @param taskInstance taskInstance
* @return create task instance result
*/
public boolean createTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.insert(taskInstance);
return count > 0;
}
/**
* update task instance
*
* @param taskInstance taskInstance
* @return update task instance result
*/
public boolean updateTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.updateById(taskInstance);
return count > 0;
}
/**
* delete a command by id
*
* @param id id
*/
public void delCommandById(int id) {
commandMapper.deleteById(id);
}
/**
* find task instance by id
*
* @param taskId task id
* @return task intance
*/
public TaskInstance findTaskInstanceById(Integer taskId) {
return taskInstanceMapper.selectById(taskId);
}
/**
     * package task instance, associate processInstance and processDefine
*
* @param taskInstId taskInstId
* @return task instance
*/
public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId) {
// get task instance
TaskInstance taskInstance = findTaskInstanceById(taskInstId);
if (taskInstance == null) {
return taskInstance;
}
// get process instance
ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
// get process define
ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId());
taskInstance.setProcessInstance(processInstance);
taskInstance.setProcessDefine(processDefine);
return taskInstance;
}
/**
* get id list by task state
*
* @param instanceId instanceId
* @param state state
* @return task instance states
*/
public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state) {
return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal());
}
/**
* find valid task list by process definition id
*
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId) {
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES);
}
/**
* find previous task list by work process id
*
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId) {
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO);
}
/**
* update work process instance map
*
* @param processInstanceMap processInstanceMap
* @return update process instance result
*/
public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) {
return processInstanceMapMapper.updateById(processInstanceMap);
}
/**
* create work process instance map
*
* @param processInstanceMap processInstanceMap
* @return create process instance result
*/
public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) {
int count = 0;
if (processInstanceMap != null) {
return processInstanceMapMapper.insert(processInstanceMap);
}
return count;
}
/**
* find work process map by parent process id and parent task id.
*
* @param parentWorkProcessId parentWorkProcessId
* @param parentTaskId parentTaskId
* @return process instance map
*/
public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) {
return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId);
}
/**
* delete work process map by parent process id
*
* @param parentWorkProcessId parentWorkProcessId
* @return delete process map result
*/
public int deleteWorkProcessMapByParentId(int parentWorkProcessId) {
return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId);
}
/**
* find sub process instance
*
* @param parentProcessId parentProcessId
* @param parentTaskId parentTaskId
* @return process instance
*/
public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) {
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId);
if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) {
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId());
return processInstance;
}
/**
* find parent process instance
*
* @param subProcessId subProcessId
* @return process instance
*/
public ProcessInstance findParentProcessInstance(Integer subProcessId) {
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId);
if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) {
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId());
return processInstance;
}
/**
* change task state
*
     * @param taskInstance taskInstance
     * @param state state
     * @param startTime startTime
     * @param host host
     * @param executePath executePath
     * @param logPath logPath
     * @param taskInstId taskInstId
*/
public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host,
String executePath,
String logPath,
int taskInstId) {
taskInstance.setState(state);
taskInstance.setStartTime(startTime);
taskInstance.setHost(host);
taskInstance.setExecutePath(executePath);
taskInstance.setLogPath(logPath);
saveTaskInstance(taskInstance);
}
/**
* update process instance
*
* @param processInstance processInstance
* @return update process instance result
*/
public int updateProcessInstance(ProcessInstance processInstance) {
return processInstanceMapper.updateById(processInstance);
}
/**
* update the process instance
*
* @param processInstanceId processInstanceId
* @param processJson processJson
* @param globalParams globalParams
* @param scheduleTime scheduleTime
* @param flag flag
* @param locations locations
* @param connects connects
* @return update process instance result
*/
public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag,
String locations, String connects) {
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance != null) {
processInstance.setProcessInstanceJson(processJson);
processInstance.setGlobalParams(globalParams);
processInstance.setScheduleTime(scheduleTime);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
return processInstanceMapper.updateById(processInstance);
}
return 0;
}
/**
* change task state
*
     * @param taskInstance taskInstance
     * @param state state
     * @param endTime endTime
     * @param processId processId
     * @param appIds appIds
     * @param taskInstId taskInstId
     * @param varPool varPool
*/
public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state,
Date endTime,
int processId,
String appIds,
int taskInstId,
String varPool) {
taskInstance.setPid(processId);
taskInstance.setAppLink(appIds);
taskInstance.setState(state);
taskInstance.setEndTime(endTime);
taskInstance.setVarPool(varPool);
saveTaskInstance(taskInstance);
}
/**
* convert integer list to string list
*
* @param intList intList
* @return string list
*/
public List<String> convertIntListToString(List<Integer> intList) {
if (intList == null) {
return new ArrayList<>();
}
List<String> result = new ArrayList<String>(intList.size());
for (Integer intVar : intList) {
result.add(String.valueOf(intVar));
}
return result;
}
/**
* query schedule by id
*
* @param id id
* @return schedule
*/
public Schedule querySchedule(int id) {
return scheduleMapper.selectById(id);
}
/**
* query Schedule by processDefinitionId
*
* @param processDefinitionId processDefinitionId
* @see Schedule
*/
public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) {
return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
}
/**
* query need failover process instance
*
* @param host host
* @return process instance list
*/
public List<ProcessInstance> queryNeedFailoverProcessInstances(String host) {
return processInstanceMapper.queryByHostAndStatus(host, stateArray);
}
/**
* process need failover process instance
*
* @param processInstance processInstance
*/
@Transactional(rollbackFor = RuntimeException.class)
public void processNeedFailoverProcessInstances(ProcessInstance processInstance) {
//1 update processInstance host is null
processInstance.setHost(Constants.NULL);
processInstanceMapper.updateById(processInstance);
//2 insert into recover command
Command cmd = new Command();
cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId());
cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId()));
cmd.setExecutorId(processInstance.getExecutorId());
cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS);
createCommand(cmd);
}
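    // illustrative example (assuming CMD_PARAM_RECOVER_PROCESS_ID_STRING resolves to
    // "ProcessInstanceId"): failing over instance 123 clears its host and enqueues a
    // RECOVER_TOLERANCE_FAULT_PROCESS command with commandParam {"ProcessInstanceId":123}.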
/**
* query all need failover task instances by host
*
* @param host host
* @return task instance list
*/
public List<TaskInstance> queryNeedFailoverTaskInstances(String host) {
return taskInstanceMapper.queryByHostAndStatus(host,
stateArray);
}
/**
* find data source by id
*
* @param id id
* @return datasource
*/
public DataSource findDataSourceById(int id) {
return dataSourceMapper.selectById(id);
}
/**
* update process instance state by id
*
* @param processInstanceId processInstanceId
* @param executionStatus executionStatus
* @return update process result
*/
public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
ProcessInstance instance = processInstanceMapper.selectById(processInstanceId);
instance.setState(executionStatus);
return processInstanceMapper.updateById(instance);
}
/**
* find process instance by the task id
*
* @param taskId taskId
* @return process instance
*/
public ProcessInstance findProcessInstanceByTaskId(int taskId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskId);
if (taskInstance != null) {
return processInstanceMapper.selectById(taskInstance.getProcessInstanceId());
}
return null;
}
/**
* find udf function list by id list string
*
* @param ids ids
* @return udf function list
*/
public List<UdfFunc> queryUdfFunListByIds(int[] ids) {
return udfFuncMapper.queryUdfByIdStr(ids, null);
}
/**
* find tenant code by resource name
*
* @param resName resource name
* @param resourceType resource type
* @return tenant code
*/
public String queryTenantCodeByResName(String resName, ResourceType resourceType) {
        // prepend '/' so the tenant code can still be resolved for resources created by older versions
String fullName = resName.startsWith("/") ? resName : String.format("/%s", resName);
return resourceMapper.queryTenantCodeByResourceName(fullName, resourceType.ordinal());
}
/**
* find schedule list by process define id.
*
* @param ids ids
* @return schedule list
*/
public List<Schedule> selectAllByProcessDefineId(int[] ids) {
return scheduleMapper.selectAllByProcessDefineArray(
ids);
}
/**
* get dependency cycle by work process define id and scheduler fire time
*
* @param masterId masterId
* @param processDefinitionId processDefinitionId
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency
* @throws Exception if error throws Exception
*/
public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception {
List<CycleDependency> list = getCycleDependencies(masterId, new int[] {processDefinitionId}, scheduledFireTime);
return list.size() > 0 ? list.get(0) : null;
}
/**
* get dependency cycle list by work process define id list and scheduler fire time
*
* @param masterId masterId
* @param ids ids
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency list
* @throws Exception if error throws Exception
*/
public List<CycleDependency> getCycleDependencies(int masterId, int[] ids, Date scheduledFireTime) throws Exception {
        List<CycleDependency> cycleDependencyList = new ArrayList<>();
        if (null == ids || ids.length == 0) {
            logger.warn("ids[] is empty, invalid input!");
            return cycleDependencyList;
        }
        if (scheduledFireTime == null) {
            logger.warn("scheduledFireTime is null, invalid input!");
            return cycleDependencyList;
}
String strCrontab = "";
CronExpression depCronExpression;
Cron depCron;
List<Date> list;
List<Schedule> schedules = this.selectAllByProcessDefineId(ids);
// for all scheduling information
for (Schedule depSchedule : schedules) {
strCrontab = depSchedule.getCrontab();
depCronExpression = CronUtils.parse2CronExpression(strCrontab);
depCron = CronUtils.parse2Cron(strCrontab);
CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron);
if (cycleEnum == null) {
logger.error("{} is not valid", strCrontab);
continue;
}
Calendar calendar = Calendar.getInstance();
switch (cycleEnum) {
/*case MINUTE:
calendar.add(Calendar.MINUTE,-61);*/
case HOUR:
calendar.add(Calendar.HOUR, -25);
break;
case DAY:
calendar.add(Calendar.DATE, -32);
break;
case WEEK:
calendar.add(Calendar.DATE, -32);
break;
case MONTH:
calendar.add(Calendar.MONTH, -13);
break;
default:
logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleEnum.name());
continue;
}
Date start = calendar.getTime();
if (depSchedule.getProcessDefinitionId() == masterId) {
list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression);
} else {
list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression);
}
            if (!list.isEmpty()) {
start = list.get(list.size() - 1);
CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(), start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum);
cycleDependencyList.add(dependency);
}
}
return cycleDependencyList;
}
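    // illustrative example (hypothetical schedule): a DAY-cycle crontab such as
    // "0 0 0 * * ?" opens a 32-day look-back window; the latest fire date within
    // [window start, scheduledFireTime] becomes the CycleDependency start time,
    // with its expiration time derived from the DAY cycle.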
/**
* find last scheduler process instance in the date interval
*
* @param definitionId definitionId
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastSchedulerProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last manual process instance interval
*
* @param definitionId process definition id
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastManualProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last running process instance
*
* @param definitionId process definition id
* @param startTime start time
* @param endTime end time
* @return process instance
*/
public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) {
return processInstanceMapper.queryLastRunningProcess(definitionId,
startTime,
endTime,
stateArray);
}
/**
* query user queue by process instance id
*
* @param processInstanceId processInstanceId
* @return queue
*/
public String queryUserQueueByProcessInstanceId(int processInstanceId) {
String queue = "";
ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
if (processInstance == null) {
return queue;
}
User executor = userMapper.selectById(processInstance.getExecutorId());
if (executor != null) {
queue = executor.getQueue();
}
return queue;
}
/**
* get task worker group
*
* @param taskInstance taskInstance
* @return workerGroupId
*/
public String getTaskWorkerGroup(TaskInstance taskInstance) {
String workerGroup = taskInstance.getWorkerGroup();
if (StringUtils.isNotBlank(workerGroup)) {
return workerGroup;
}
int processInstanceId = taskInstance.getProcessInstanceId();
ProcessInstance processInstance = findProcessInstanceById(processInstanceId);
if (processInstance != null) {
return processInstance.getWorkerGroup();
}
logger.info("task : {} will use default worker group", taskInstance.getId());
return Constants.DEFAULT_WORKER_GROUP;
}
/**
* get have perm project list
*
* @param userId userId
* @return project list
*/
public List<Project> getProjectListHavePerm(int userId) {
List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId);
List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId);
if (createProjects == null) {
createProjects = new ArrayList<>();
}
if (authedProjects != null) {
createProjects.addAll(authedProjects);
}
return createProjects;
}
/**
* get have perm project ids
*
* @param userId userId
* @return project ids
*/
public List<Integer> getProjectIdListHavePerm(int userId) {
List<Integer> projectIdList = new ArrayList<>();
for (Project project : getProjectListHavePerm(userId)) {
projectIdList.add(project.getId());
}
return projectIdList;
}
/**
* list unauthorized udf function
*
* @param userId user id
* @param needChecks data source id array
* @return unauthorized udf function list
*/
public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) {
List<T> resultList = new ArrayList<T>();
if (Objects.nonNull(needChecks) && needChecks.length > 0) {
Set<T> originResSet = new HashSet<T>(Arrays.asList(needChecks));
switch (authorizationType) {
case RESOURCE_FILE_ID:
Set<Integer> authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedResourceFiles);
break;
case RESOURCE_FILE_NAME:
Set<String> authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(t -> t.getFullName()).collect(toSet());
originResSet.removeAll(authorizedResources);
break;
case UDF_FILE:
Set<Integer> authorizedUdfFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedUdfFiles);
break;
case DATASOURCE:
Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedDatasources);
break;
case UDF:
Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(t -> t.getId()).collect(toSet());
originResSet.removeAll(authorizedUdfs);
break;
default:
break;
}
resultList.addAll(originResSet);
}
return resultList;
}
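    // illustrative example (hypothetical ids): with needChecks = datasource ids {1, 2, 3}
    // and the user authorized for {1, 3}, the set difference leaves {2} as unauthorized.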
/**
* get user by user id
*
* @param userId user id
* @return User
*/
public User getUserById(int userId) {
return userMapper.selectById(userId);
}
/**
     * get resource by resource id
     *
     * @param resourceId resource id
     * @return Resource
     */
    public Resource getResourceById(int resourceId) {
        return resourceMapper.selectById(resourceId);
}
/**
* list resources by ids
*
* @param resIds resIds
* @return resource list
*/
public List<Resource> listResourceByIds(Integer[] resIds) {
return resourceMapper.listResourceByIds(resIds);
}
/**
* format task app id in task instance
*
     * @param taskInstance task instance
     * @return app id formatted as {definitionId}_{processInstanceId}_{taskInstanceId}
*/
public String formatTaskAppId(TaskInstance taskInstance) {
ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId());
ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if (definition == null || processInstanceById == null) {
return "";
}
return String.format("%s_%s_%s",
definition.getId(),
processInstanceById.getId(),
taskInstance.getId());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 1,682 | [suggest] The method getCurTaskParamsClass() of class AbstractTask duplicates TaskParametersUtils and can be replaced |
**Describe the bug**
The method getCurTaskParamsClass() of class AbstractTask duplicates the functionality of class TaskParametersUtils and can be replaced by it.
**To Reproduce**
**Expected behavior**
- Delete the method getCurTaskParamsClass() of class AbstractTask
**Which version of Dolphin Scheduler:**
-[1.2.1-dev]
**Additional context**
**Requirement or improvement**
Replace the getCurTaskParamsClass() function of class AbstractTask with TaskParametersUtils.
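A minimal sketch of the proposed replacement (assuming TaskParametersUtils exposes a static getParameters(taskType, taskParams) factory that returns the populated AbstractParameters); this is an illustration, not the merged patch:

```java
// in AbstractTask.after(): resolve parameters via the shared utility instead of
// looking up the parameter class locally with getCurTaskParamsClass()
AbstractParameters params = TaskParametersUtils.getParameters(
        taskExecutionContext.getTaskType(),      // e.g. "SHELL"
        taskExecutionContext.getTaskParams());   // raw task params json
```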
| https://github.com/apache/dolphinscheduler/issues/1682 | https://github.com/apache/dolphinscheduler/pull/4262 | 3bcd3fbcd344983ed35f997bed1fea2c0cf6055d | 24864512212a94f53da32eb20dc56673f960547a | "2020-01-02T08:42:35Z" | java | "2020-12-20T13:52:52Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskRecordStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.task.datax.DataxParameters;
import org.apache.dolphinscheduler.common.task.flink.FlinkParameters;
import org.apache.dolphinscheduler.common.task.mr.MapreduceParameters;
import org.apache.dolphinscheduler.common.task.procedure.ProcedureParameters;
import org.apache.dolphinscheduler.common.task.python.PythonParameters;
import org.apache.dolphinscheduler.common.task.shell.ShellParameters;
import org.apache.dolphinscheduler.common.task.spark.SparkParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sqoop.SqoopParameters;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.TaskRecordDao;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import java.util.List;
import java.util.Map;
import static ch.qos.logback.classic.ClassicConstants.FINALIZE_SESSION_MARKER;
/**
* executive task
*/
public abstract class AbstractTask {
/**
* varPool string
*/
protected String varPool;
/**
* taskExecutionContext
**/
TaskExecutionContext taskExecutionContext;
/**
* log record
*/
protected Logger logger;
/**
* SHELL process pid
*/
protected int processId;
/**
     * other resource manager appId, for example: YARN etc.
*/
protected String appIds;
/**
* cancel
*/
protected volatile boolean cancel = false;
/**
* exit code
*/
protected volatile int exitStatusCode = -1;
/**
* constructor
* @param taskExecutionContext taskExecutionContext
* @param logger logger
*/
protected AbstractTask(TaskExecutionContext taskExecutionContext, Logger logger) {
this.taskExecutionContext = taskExecutionContext;
this.logger = logger;
}
/**
* init task
* @throws Exception exception
*/
public void init() throws Exception {
}
/**
* task handle
* @throws Exception exception
*/
public abstract void handle() throws Exception;
/**
* cancel application
* @param status status
* @throws Exception exception
*/
public void cancelApplication(boolean status) throws Exception {
this.cancel = status;
}
/**
* log handle
* @param logs log list
*/
public void logHandle(List<String> logs) {
// note that the "new line" is added here to facilitate log parsing
if (logs.contains(FINALIZE_SESSION_MARKER.toString())) {
logger.info(FINALIZE_SESSION_MARKER, FINALIZE_SESSION_MARKER.toString());
} else {
logger.info(" -> {}", String.join("\n\t", logs));
}
}
public void setVarPool(String varPool) {
this.varPool = varPool;
}
public String getVarPool() {
return varPool;
}
/**
* get exit status code
* @return exit status code
*/
public int getExitStatusCode() {
return exitStatusCode;
}
public void setExitStatusCode(int exitStatusCode) {
this.exitStatusCode = exitStatusCode;
}
public String getAppIds() {
return appIds;
}
public void setAppIds(String appIds) {
this.appIds = appIds;
}
public int getProcessId() {
return processId;
}
public void setProcessId(int processId) {
this.processId = processId;
}
/**
* get task parameters
* @return AbstractParameters
*/
public abstract AbstractParameters getParameters();
/**
* result processing
*/
public void after(){
if (getExitStatusCode() == Constants.EXIT_CODE_SUCCESS){
            // task record flag: if true, start up qianfan
if (TaskRecordDao.getTaskRecordFlag()
&& TaskType.typeIsNormalTask(taskExecutionContext.getTaskType())){
AbstractParameters params = (AbstractParameters) JSONUtils.parseObject(taskExecutionContext.getTaskParams(), getCurTaskParamsClass());
// replace placeholder
Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
taskExecutionContext.getDefinedParams(),
params.getLocalParametersMap(),
CommandType.of(taskExecutionContext.getCmdTypeIfComplement()),
taskExecutionContext.getScheduleTime());
if (paramsMap != null && !paramsMap.isEmpty()
&& paramsMap.containsKey("v_proc_date")){
String vProcDate = paramsMap.get("v_proc_date").getValue();
if (!StringUtils.isEmpty(vProcDate)){
TaskRecordStatus taskRecordState = TaskRecordDao.getTaskRecordState(taskExecutionContext.getTaskName(), vProcDate);
logger.info("task record status : {}",taskRecordState);
if (taskRecordState == TaskRecordStatus.FAILURE){
setExitStatusCode(Constants.EXIT_CODE_FAILURE);
}
}
}
}
        } else if (getExitStatusCode() == Constants.EXIT_CODE_KILL) {
            setExitStatusCode(Constants.EXIT_CODE_KILL);
        } else {
            setExitStatusCode(Constants.EXIT_CODE_FAILURE);
        }
}
/**
* get current task parameter class
* @return Task Params Class
*/
    private Class<?> getCurTaskParamsClass() {
        Class<?> paramsClass = null;
// get task type
TaskType taskType = TaskType.valueOf(taskExecutionContext.getTaskType());
switch (taskType){
case SHELL:
paramsClass = ShellParameters.class;
break;
case SQL:
paramsClass = SqlParameters.class;
break;
case PROCEDURE:
paramsClass = ProcedureParameters.class;
break;
case MR:
paramsClass = MapreduceParameters.class;
break;
case SPARK:
paramsClass = SparkParameters.class;
break;
case FLINK:
paramsClass = FlinkParameters.class;
break;
case PYTHON:
paramsClass = PythonParameters.class;
break;
case DATAX:
paramsClass = DataxParameters.class;
break;
case SQOOP:
paramsClass = SqoopParameters.class;
break;
case CONDITIONS:
paramsClass = ConditionsParameters.class;
break;
default:
logger.error("not support this task type: {}", taskType);
throw new IllegalArgumentException("not support this task type");
}
return paramsClass;
}
/**
* get exit status according to exitCode
* @return exit status
*/
public ExecutionStatus getExitStatus(){
ExecutionStatus status;
switch (getExitStatusCode()){
case Constants.EXIT_CODE_SUCCESS:
status = ExecutionStatus.SUCCESS;
break;
case Constants.EXIT_CODE_KILL:
status = ExecutionStatus.KILL;
break;
default:
status = ExecutionStatus.FAILURE;
break;
}
return status;
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Ans-ui is replaced with element-ui
2. Alarm group plug-in (issues #3049)
3. Front-end dependency version upgrade
The current rework is being carried out on the alert_plugin_design branch. Anyone interested in this work is welcome to join. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/function/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('UDF Function Name')">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.funcName }}</p>
<div slot="reference" class="name-wrapper">
<a href="javascript:" class="links">{{ scope.row.funcName }}</a>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column prop="className" :label="$t('Class Name')"></el-table-column>
<el-table-column prop="type" :label="$t('type')"></el-table-column>
<el-table-column prop="description" :label="$t('Description')" width="200"></el-table-column>
<el-table-column prop="resourceName" :label="$t('Jar Package')"></el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Rename')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="createUdfDialog"
width="60%">
<m-create-udf :item="item" @onUpdate="onUpdate" @close="close"></m-create-udf>
</el-dialog>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import mCreateUdf from './createUdf'
export default {
name: 'udf-manage-list',
data () {
return {
list: [],
spinnerLoading: false,
createUdfDialog: false,
item: {}
}
},
props: {
udfFuncList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteUdf']),
_delete (item, i) {
this.spinnerLoading = true
this.deleteUdf({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
this.spinnerLoading = false
}).catch(e => {
this.$message.error(e.msg || '')
this.spinnerLoading = false
})
},
_edit (item) {
this.item = item
this.createUdfDialog = true
},
onUpdate () {
this.$emit('on-update')
this.createUdfDialog = false
},
close () {
this.createUdfDialog = false
}
},
watch: {
udfFuncList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.udfFuncList
},
components: { mCreateUdf }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Ans-ui is replaced with element-ui
2. Alarm group plug-in (issues #3049)
3. Front-end dependency version upgrade
The current rework is being carried out on the alert_plugin_design branch. Anyone interested in this work is welcome to join. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/resource/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('UDF Resource Name')">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.alias }}</p>
<div slot="reference" class="name-wrapper">
<a href="javascript:" class="links" @click="_go(scope.row)">{{ scope.row.alias }}</a>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column :label="$t('Whether directory')" width="100">
<template slot-scope="scope">
{{scope.row.directory? $t('Yes') : $t('No')}}
</template>
</el-table-column>
<el-table-column prop="fileName" :label="$t('File Name')"></el-table-column>
<el-table-column :label="$t('File Size')">
<template slot-scope="scope">
{{_rtSize(scope.row.size)}}
</template>
</el-table-column>
<el-table-column prop="description" :label="$t('Description')" width="200"></el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Rename')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit" @click="_rename(scope.row,scope.$index)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Download')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-download" @click="_downloadFile(scope.row)" :disabled="scope.row.directory? true: false" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="renameDialog"
width="45%">
<m-rename :item="item" @onUpDate="onUpDate" @close="close"></m-rename>
</el-dialog>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import mRename from './rename'
import { downloadFile } from '@/module/download'
import { bytesToSize } from '@/module/util/util'
import localStore from '@/module/util/localStorage'
export default {
name: 'udf-manage-list',
data () {
return {
list: [],
renameDialog: false,
index: null
}
},
props: {
udfResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_downloadFile (item) {
downloadFile('resources/download', {
id: item.id
})
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/udf/subUdfDirectory/${item.id}` })
}
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
this.item = item
this.index = i
this.renameDialog = true
},
onUpDate (item) {
this.$set(this.list, this.index, item)
this.renameDialog = false
},
close () {
this.renameDialog = false
}
},
watch: {
udfResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.udfResourcesList
},
components: { mRename }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Ans-ui is replaced with element-ui
2. Alarm group plug-in (issues #3049)
3. Front-end dependency version upgrade
The current rework is being carried out on the alert_plugin_design branch. Anyone interested in this work is welcome to join. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/udf/pages/subUdfDirectory/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('UDF Resource Name')" min-width="200">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.alias }}</p>
<div slot="reference" class="name-wrapper">
<a href="javascript:" class="links" @click="_go(scope.row)">{{ scope.row.alias }}</a>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column :label="$t('Whether directory')" width="100">
<template slot-scope="scope">
{{scope.row.directory? $t('Yes') : $t('No')}}
</template>
</el-table-column>
<el-table-column prop="fileName" :label="$t('File Name')" min-width="200"></el-table-column>
<el-table-column :label="$t('File Size')">
<template slot-scope="scope">
{{_rtSize(scope.row.size)}}
</template>
</el-table-column>
<el-table-column prop="description" :label="$t('Description')" min-width="180"></el-table-column>
<el-table-column :label="$t('Create Time')" min-width="140">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="140">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Rename')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit" @click="_rename(scope.row,scope.$index)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Download')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-download" @click="_downloadFile(scope.row)" :disabled="scope.row.directory? true: false" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="renameDialog"
width="45%">
<m-rename :item="item" @onUpDate="onUpDate" @close="close"></m-rename>
</el-dialog>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import mRename from './rename'
import { downloadFile } from '@/module/download'
import { bytesToSize } from '@/module/util/util'
import localStore from '@/module/util/localStorage'
export default {
name: 'udf-manage-list',
data () {
return {
list: [],
renameDialog: false,
index: null
}
},
props: {
udfResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_downloadFile (item) {
downloadFile('resources/download', {
id: item.id
})
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/udf/subUdfDirectory/${item.id}` })
}
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
    // the old ans-ui poptip refs no longer exist after the el-popconfirm migration,
    // so the doClose() calls (and the _closeDelete helper) are dropped
    _delete (item, i) {
      this.deleteResource({
        id: item.id
      }).then(res => {
        this.$emit('on-update')
        this.$message.success(res.msg)
      }).catch(e => {
        this.$message.error(e.msg || '')
      })
    },
_rename (item, i) {
this.item = item
this.index = i
this.renameDialog = true
},
onUpDate (item) {
this.$set(this.list, this.index, item)
this.renameDialog = false
},
close () {
this.renameDialog = false
}
},
watch: {
udfResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.udfResourcesList
},
components: { mRename }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Ans-ui is replaced with element-ui
2. Alarm group plug-in (issues #3049)
3. Front-end dependency version upgrade
The current rework is being carried out on the alert_plugin_design branch. Anyone interested in this work is welcome to join. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/queue/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="queueName" :label="$t('Name')"></el-table-column>
<el-table-column prop="queue" :label="$t('Queue value')"></el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="130">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top">
<el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
export default {
name: 'tenement-list',
data () {
return {
list: []
}
},
props: {
queueList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('security', ['deleteQueue']),
_delete (item, i) {
this.deleteQueue({
id: item.id
}).then(res => {
this.list.splice(i, 1)
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_edit (item) {
this.$emit('on-edit', item)
}
},
watch: {
queueList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.queueList
},
mounted () {
},
components: { }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Ans-ui is replaced with element-ui
2. Alarm group plug-in (issues #3049)
3. Front-end dependency version upgrade
The current rework is being carried out on the alert_plugin_design branch. Anyone interested in this work is welcome to join. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/tenement/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="tenantCode" :label="$t('Tenant Code')" width="180"></el-table-column>
<el-table-column prop="description" :label="$t('Description')" width="180"></el-table-column>
<el-table-column prop="queueName" :label="$t('Queue')" width="180"></el-table-column>
<el-table-column :label="$t('Create Time')">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="100">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top">
<el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
export default {
name: 'tenement-list',
data () {
return {
list: []
}
},
props: {
tenementList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('security', ['deleteQueue']),
_delete (item, i) {
this.deleteQueue({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_edit (item) {
this.$emit('on-edit', item)
}
},
watch: {
tenementList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.tenementList
},
mounted () {
},
components: { }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Replace ans-ui with element-ui
2. Alarm group plug-in (issue #3049)
3. Upgrade the front-end dependency versions
This renovation work is currently being carried out on the alert_plugin_design branch. Anyone interested in this area is welcome to join in. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model user-list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="userName" :label="$t('User Name')"></el-table-column>
<el-table-column :label="$t('User Type')">
<template slot-scope="scope">
<span>{{scope.row.userType === 'GENERAL_USER'? `${$t('Ordinary users')}` : `${$t('Administrator')}`}}</span>
</template>
</el-table-column>
<el-table-column prop="tenantCode" :label="$t('Tenant')" width="160"></el-table-column>
<el-table-column prop="queue" :label="$t('Queue')"></el-table-column>
<el-table-column prop="email" :label="$t('Email')" min-width="120"></el-table-column>
<el-table-column prop="phone" :label="$t('Phone')" min-width="90"></el-table-column>
<el-table-column :label="$t('State')">
<template slot-scope="scope">
<span>{{scope.row.state === 1? `${$t('Enable')}` : `${$t('Disable')}`}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="130">
<template slot-scope="scope">
<el-tooltip :content="$t('Authorize')" placement="top">
<el-dropdown trigger="click">
<el-button type="warning" size="mini" icon="el-icon-user" circle></el-button>
<el-dropdown-menu slot="dropdown">
<el-dropdown-item @click.native="_authProject(scope.row,scope.$index)">{{$t('Project')}}</el-dropdown-item>
<el-dropdown-item @click.native="_authFile(scope.row,scope.$index)">{{$t('Resources')}}</el-dropdown-item>
<el-dropdown-item @click.native="_authDataSource(scope.row,scope.$index)">{{$t('Datasource')}}</el-dropdown-item>
<el-dropdown-item @click.native="_authUdfFunc(scope.row,scope.$index)">{{$t('UDF Function')}}</el-dropdown-item>
</el-dropdown-menu>
</el-dropdown>
</el-tooltip>
<el-tooltip :content="$t('Edit')" placement="top">
<el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="authProjectDialog"
width="50%">
<m-transfer :transferData="transferData" @onUpdateAuthProject="onUpdateAuthProject" @closeAuthProject="closeAuthProject"></m-transfer>
</el-dialog>
<el-dialog
:visible.sync="authDataSourceDialog"
width="50%">
<m-transfer :transferData="transferData" @onUpdateAuthDataSource="onUpdateAuthDataSource" @closeAuthDataSource="closeAuthDataSource"></m-transfer>
</el-dialog>
<el-dialog
:visible.sync="authUdfFuncDialog"
width="50%">
<m-transfer :transferData="transferData" @onUpdateAuthUdfFunc="onUpdateAuthUdfFunc" @closeAuthUdfFunc="closeAuthUdfFunc"></m-transfer>
</el-dialog>
<el-dialog
:visible.sync="resourceDialog"
width="50%">
<m-resource :resourceData="resourceData" @onUpdateAuthResource="onUpdateAuthResource" @closeAuthResource="closeAuthResource"></m-resource>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import { mapActions } from 'vuex'
import mTransfer from '@/module/components/transfer/transfer'
import mResource from '@/module/components/transfer/resource'
export default {
name: 'user-list',
data () {
return {
list: [],
authProjectDialog: false,
transferData: {
sourceListPrs: [],
targetListPrs: [],
type: {
name: ''
}
},
item: {},
authDataSourceDialog: false,
authUdfFuncDialog: false,
resourceData: {
fileSourceList: [],
udfSourceList: [],
fileTargetList: [],
udfTargetList: [],
type: {
name: ''
}
},
resourceDialog: false
}
},
props: {
userList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('security', ['deleteUser', 'getAuthList', 'grantAuthorization', 'getResourceList']),
_delete (item, i) {
this.deleteUser({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_edit (item) {
this.$emit('on-edit', item)
},
_authProject (item, i) {
this.getAuthList({
id: item.id,
type: 'project',
category: 'projects'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.name
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.name
}
})
this.item = item
this.transferData.sourceListPrs = sourceListPrs
this.transferData.targetListPrs = targetListPrs
this.transferData.type.name = `${i18n.$t('Project')}`
this.authProjectDialog = true
})
},
onUpdateAuthProject (projectIds) {
this._grantAuthorization('users/grant-project', {
userId: this.item.id,
projectIds: projectIds
})
this.authProjectDialog = false
},
closeAuthProject () {
this.authProjectDialog = false
},
/*
getAllLeaf
*/
getAllLeaf (data) {
let result = []
let getLeaf = (data) => {
data.forEach(item => {
if (item.children.length === 0) {
result.push(item)
} else {
getLeaf(item.children)
}
})
}
getLeaf(data)
return result
},
_authFile (item, i) {
this.getResourceList({
id: item.id,
type: 'file',
category: 'resources'
}).then(data => {
let fileSourceList = []
let udfSourceList = []
data[0].forEach((value, index, array) => {
if (value.type === 'FILE') {
fileSourceList.push(value)
} else {
udfSourceList.push(value)
}
})
let fileTargetList = []
let udfTargetList = []
let pathId = []
data[1].forEach(v => {
let arr = []
arr[0] = v
if (this.getAllLeaf(arr).length > 0) {
pathId.push(this.getAllLeaf(arr)[0])
}
})
data[1].forEach((value, index, array) => {
if (value.type === 'FILE') {
fileTargetList.push(value)
} else {
udfTargetList.push(value)
}
})
fileTargetList = _.map(fileTargetList, v => {
return v.id
})
udfTargetList = _.map(udfTargetList, v => {
return v.id
})
this.item = item
this.resourceData.fileSourceList = fileSourceList
this.resourceData.udfSourceList = udfSourceList
this.resourceData.fileTargetList = fileTargetList
this.resourceData.udfTargetList = udfTargetList
this.resourceData.type.name = `${i18n.$t('Resources')}`
this.resourceDialog = true
})
},
onUpdateAuthResource (resourceIds) {
this._grantAuthorization('users/grant-file', {
userId: this.item.id,
resourceIds: resourceIds
})
this.resourceDialog = false
},
closeAuthResource () {
this.resourceDialog = false
},
_authDataSource (item, i) {
this.getAuthList({
id: item.id,
type: 'datasource',
category: 'datasources'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.name
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.name
}
})
this.item = item
this.transferData.sourceListPrs = sourceListPrs
this.transferData.targetListPrs = targetListPrs
this.transferData.type.name = `${i18n.$t('Datasource')}`
this.authDataSourceDialog = true
})
},
onUpdateAuthDataSource (datasourceIds) {
this._grantAuthorization('users/grant-datasource', {
userId: this.item.id,
datasourceIds: datasourceIds
})
this.authDataSourceDialog = false
},
closeAuthDataSource () {
this.authDataSourceDialog = false
},
_authUdfFunc (item, i) {
this.getAuthList({
id: item.id,
type: 'udf-func',
category: 'resources'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.funcName
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.funcName
}
})
this.item = item
this.transferData.sourceListPrs = sourceListPrs
this.transferData.targetListPrs = targetListPrs
this.transferData.type.name = `${i18n.$t('UDF Function')}`
this.authUdfFuncDialog = true
})
},
onUpdateAuthUdfFunc (udfIds) {
this._grantAuthorization('users/grant-udf-func', {
userId: this.item.id,
udfIds: udfIds
})
this.authUdfFuncDialog = false
},
closeAuthUdfFunc () {
this.authUdfFuncDialog = false
},
_grantAuthorization (api, param) {
this.grantAuthorization({
api: api,
param: param
}).then(res => {
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
watch: {
userList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.userList
},
mounted () {
},
components: { mTransfer, mResource }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.user-list-model {
.user-list-poptip {
min-width: 90px !important;
.auth-select-box {
a {
font-size: 14px;
height: 28px;
line-height: 28px;
display: block;
}
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Replace ans-ui with element-ui
2. Alarm group plug-in (issue #3049)
3. Upgrade the front-end dependency versions
This renovation work is currently being carried out on the alert_plugin_design branch. Anyone interested in this area is welcome to join in. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/warningGroups/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="groupName" :label="$t('Group Name')"></el-table-column>
        <el-table-column :label="$t('Group Type')" width="100">
          <template slot-scope="scope">
            <span>{{scope.row.groupType === 'EMAIL' ? `${$t('Email')}` : `${$t('SMS')}`}}</span>
          </template>
        </el-table-column>
<el-table-column prop="description" :label="$t('Remarks')" width="200"></el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="130">
<template slot-scope="scope">
<el-tooltip :content="$t('Managing Users')" placement="top">
<el-button type="primary" size="mini" icon="el-icon-user" @click="_mangeUser(scope.row, scope.$index)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Edit')" placement="top">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference" :disabled="scope.row.id==1?true: false"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="transferDialog"
width="40%">
<m-transfer :transferData="transferData" @onUpdate="onUpdate" @close="close"></m-transfer>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import { mapActions } from 'vuex'
import mTransfer from '@/module/components/transfer/transfer'
export default {
  name: 'warning-group-list',
data () {
return {
list: [],
transferDialog: false,
item: {},
transferData: {
sourceListPrs: [],
targetListPrs: [],
type: {
name: `${i18n.$t('Managing Users')}`
}
}
}
},
props: {
alertgroupList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('security', ['deleteAlertgrou', 'getAuthList', 'grantAuthorization']),
_delete (item, i) {
this.deleteAlertgrou({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_edit (item) {
this.$emit('on-edit', item)
},
_mangeUser (item, i) {
this.getAuthList({
id: item.id,
type: 'user',
category: 'users'
}).then(data => {
let sourceListPrs = _.map(data[0], v => {
return {
id: v.id,
name: v.userName
}
})
let targetListPrs = _.map(data[1], v => {
return {
id: v.id,
name: v.userName
}
})
this.item = item
this.transferData.sourceListPrs = sourceListPrs
this.transferData.targetListPrs = targetListPrs
this.transferDialog = true
})
},
onUpdate (userIds) {
this._grantAuthorization('alert-group/grant-user', {
userIds: userIds,
alertgroupId: this.item.id
})
this.transferDialog = false
},
close () {
this.transferDialog = false
},
_grantAuthorization (api, param) {
this.grantAuthorization({
api: api,
param: param
}).then(res => {
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
watch: {
alertgroupList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.alertgroupList
},
mounted () {
},
components: { mTransfer }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Replace ans-ui with element-ui
2. Alarm group plug-in (issue #3049)
3. Upgrade the front-end dependency versions
This renovation work is currently being carried out on the alert_plugin_design branch. Anyone interested in this area is welcome to join in. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/workerGroups/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="name" :label="$t('Group')"></el-table-column>
<el-table-column label="IPList" width="300">
<template slot-scope="scope">
<span>{{scope.row.ipList.join(',')}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
</el-table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
export default {
  name: 'worker-group-list',
data () {
return {
list: []
}
},
props: {
workerGroupList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('security', ['deleteWorkerGroups']),
_delete (item, i) {
this.deleteWorkerGroups({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_edit (item) {
this.$emit('on-edit', item)
}
},
watch: {
workerGroupList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.workerGroupList
},
mounted () {},
components: {}
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,878 | [Feature][ui] DS front-end new feature development plan | DS front-end new feature development plan
1. Replace ans-ui with element-ui
2. Alarm group plug-in (issue #3049)
3. Upgrade the front-end dependency versions
This renovation work is currently being carried out on the alert_plugin_design branch. Anyone interested in this area is welcome to join in. | https://github.com/apache/dolphinscheduler/issues/3878 | https://github.com/apache/dolphinscheduler/pull/4280 | ef7d7b791ee48d7cac40c84e63895417a9a6c8e8 | 3a3df01abed81e23972eedc8e0d30ae2e9dfb6ff | "2020-10-10T01:55:09Z" | java | "2020-12-22T01:03:54Z" | dolphinscheduler-ui/src/sass/common/_table.scss | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
.ansfont {
font-size: 16px;
font-style: normal;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.as {
display: inline-block;
font-size: 14px;
text-rendering: auto;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.as-spin {
-webkit-animation: as-spin 2s infinite linear;
animation: as-spin 2s infinite linear;
}
@-webkit-keyframes as-spin {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
@keyframes as-spin {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(359deg);
transform: rotate(359deg);
}
}
.table-box {
border-top: 1px solid #ecf3ff;
.ans-checkbox-wrapper-disabled {
.ans-checkbox-inner {
background-color: #ccc;
}
}
.ans-checkbox {
.ans-checkbox-inner {
border: 1px solid #ccc;
}
}
.ans-checkbox-checked {
.ans-checkbox-inner {
border: 1px solid #1489e2;
}
}
.ellipsis {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
display: block;
}
button {
.iconfont {
font-size: 16px;
}
.ans-icon {
font-size: 14px;
}
}
table {
width: 100%;
&.fixed {
table-layout: fixed;
}
tr {
height: 40px;
box-sizing: border-box;
background: #fff;
color: #333;
font-size: 12px;
border-bottom: 1px solid #ecf3ff;
&:hover {
background: #ddecff;
}
th,td{
&:nth-child(1) {
width: 50px;
text-align: center;
}
}
th {
&:nth-child(1) {
width: 60px;
text-align: center;
}
>span {
font-size: 12px;
color: #555;
}
}
td {
>span {
font-size: 12px;
color: #666;
}
&:nth-child(1) {
span {
text-align: center;
}
}
}
}
}
.links {
color: #2d8cf0;
&:hover {
text-decoration: underline;
}
}
}
.table-small-model {
padding: 0 10px;
table {
width: 100%;
tr{
background: #fff;
th,td {
padding-left: 8px;
}
th {
height: 36px;
line-height: 38px;
font-size: 12px;
font-weight: bold;
color: #333;
border-bottom: 2px solid #ECEDEC;
}
td {
height: 32px;
line-height: 32px;
border-bottom: 1px solid #ECEDEC;
span {
font-size: 12px;
color: #333;
}
.links {
color:#2d8cf0;
}
}
&:hover {
td {
background: #ddecff;
}
}
}
}
}
.el-table--enable-row-hover .el-table__body tr:hover>td {
background-color: #ddecff;
}
.el-table th>.cell {
color: #555;
}
.el-table td div {
color: #666;
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,294 | [Bug][dao] The database field of the t_ds_udfs table is the mysql DB keyword | **Describe the bug**
The `database` column of the `t_ds_udfs` table collides with the MySQL reserved keyword `DATABASE`, so the mapper's unescaped references to it produce invalid SQL.
**Screenshots**
![image](https://user-images.githubusercontent.com/37063904/102960654-5d807580-451d-11eb-9246-86c506b1cf7a.png)
**Which version of Dolphin Scheduler:**
- [dev]
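Since `DATABASE` is a reserved word in MySQL, one straightforward fix is to backtick-quote the column everywhere the mapper references it. This is a sketch only, not necessarily the exact change made in the linked PR:

```xml
<select id="selectUdfById" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
    select
    id, user_id, func_name, class_name, type, arg_types,
    `database`, description, resource_id, resource_name, create_time, update_time
    from t_ds_udfs
    where id = #{id}
</select>
```

Note that backticks are MySQL-specific identifier quoting; renaming the column to something non-reserved would be the more portable fix if other databases need to be supported.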
| https://github.com/apache/dolphinscheduler/issues/4294 | https://github.com/apache/dolphinscheduler/pull/4295 | f0ada2aa2dba2ecdbfdd28c7f88a68fe06730d86 | 3f9dcd5d4489197b22027d752e2c0bd0d453c30c | "2020-12-23T04:54:47Z" | java | "2020-12-24T01:20:58Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/UdfFuncMapper.xml | <?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper">
<select id="selectUdfById" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
id, user_id, func_name, class_name, type, arg_types,
database, description, resource_id, resource_name, create_time, update_time
from t_ds_udfs
where id = #{id}
</select>
<select id="queryUdfByIdStr" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
id, user_id, func_name, class_name, type, arg_types,
database, description, resource_id, resource_name, create_time, update_time
from t_ds_udfs
where 1 = 1
<if test="ids != null and ids != ''">
and id in
<foreach collection="ids" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
<if test="funcNames != null and funcNames != ''">
and func_name = #{funcNames}
</if>
order by id asc
</select>
<select id="queryUdfFuncPaging" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select id, user_id, func_name, class_name, type, arg_types, database, description, resource_id, resource_name, create_time, update_time
from t_ds_udfs
where 1=1
<if test="searchVal!= null and searchVal != ''">
and func_name like concat('%', #{searchVal}, '%')
</if>
<if test="userId != 0">
and id in (
select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
</if>
order by create_time desc
</select>
<select id="getUdfFuncByType" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
id, user_id, func_name, class_name, type, arg_types,
database, description, resource_id, resource_name, create_time, update_time
from t_ds_udfs
where type=#{type}
<if test="userId != 0">
and id in (
select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
</if>
</select>
<select id="queryUdfFuncExceptUserId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
id, user_id, func_name, class_name, type, arg_types,
database, description, resource_id, resource_name, create_time, update_time
from t_ds_udfs
where user_id <![CDATA[ <> ]]> #{userId}
</select>
<select id="queryAuthedUdfFunc" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
SELECT
u.id, u.user_id, u.func_name, u.class_name, u.type, u.arg_types,
u.database, u.description, u.resource_id, u.resource_name, u.create_time, u.update_time
from t_ds_udfs u,t_ds_relation_udfs_user rel
WHERE u.id = rel.udf_id
AND rel.user_id = #{userId}
</select>
<select id="listAuthorizedUdfFunc" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
u.id, u.user_id, u.func_name, u.class_name, u.type, u.arg_types,
u.database, u.description, u.resource_id, u.resource_name, u.create_time, u.update_time
from t_ds_udfs u
where
id in (select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
<if test="udfIds != null and udfIds != ''">
and id in
<foreach collection="udfIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listUdfByResourceId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
u.id, u.user_id, u.func_name, u.class_name, u.type, u.arg_types,
u.database, u.description, u.resource_id, u.resource_name, u.create_time, u.update_time
from t_ds_udfs u
where 1=1
<if test="resourceIds != null and resourceIds != ''">
and resource_id in
<foreach collection="resourceIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listAuthorizedUdfByResourceId" resultType="org.apache.dolphinscheduler.dao.entity.UdfFunc">
select
u.id, u.user_id, u.func_name, u.class_name, u.type, u.arg_types,
u.database, u.description, u.resource_id, u.resource_name, u.create_time, u.update_time
from t_ds_udfs u
where
id in (select udf_id from t_ds_relation_udfs_user where user_id=#{userId}
union select id as udf_id from t_ds_udfs where user_id=#{userId})
<if test="resourceIds != null and resourceIds != ''">
and resource_id in
<foreach collection="resourceIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<update id="batchUpdateUdfFunc" parameterType="java.util.List">
<foreach collection="udfFuncList" item="udf" index="index" open="" close="" separator=";">
update t_ds_udfs
<set>
resource_name=#{udf.resourceName},
update_time=#{udf.updateTime}
</set>
<where>
id=#{udf.id}
</where>
</foreach>
</update>
</mapper>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,301 | [Bug][dao] The token management list does not display the user name |
**Describe the bug**
The token management list does not display the user name
**To Reproduce**
1. Click the Token Management menu
**Screenshots**
![image](https://user-images.githubusercontent.com/37063904/103061290-d64b0480-45e5-11eb-872d-cb9d002d62f1.png)
**Which version of Dolphin Scheduler:**
- [dev]
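The pagination query in `AccessTokenMapper.xml` joins `t_ds_user` but never selects `u.user_name`, so the front end has no user name to render. A sketch of the likely fix, which assumes the `AccessToken` entity exposes a `userName` property for MyBatis to populate:

```xml
<select id="selectAccessTokenPage" resultType="org.apache.dolphinscheduler.dao.entity.AccessToken">
    select t.id, t.user_id, t.token, t.expire_time, t.create_time, t.update_time, u.user_name
    from t_ds_access_token t
    left join t_ds_user u on t.user_id = u.id
    where 1 = 1
    <if test="userName != null and userName != ''">
        and u.user_name like concat ('%', #{userName}, '%')
    </if>
    <if test="userId != 0">
        and t.user_id = #{userId}
    </if>
    order by t.update_time desc
</select>
```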
| https://github.com/apache/dolphinscheduler/issues/4301 | https://github.com/apache/dolphinscheduler/pull/4302 | 3f9dcd5d4489197b22027d752e2c0bd0d453c30c | ec84695323190a24a0d277b1598a3bdd2baa901f | "2020-12-24T04:46:45Z" | java | "2020-12-24T05:01:29Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapper.xml | <?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.AccessTokenMapper">
<select id="selectAccessTokenPage" resultType="org.apache.dolphinscheduler.dao.entity.AccessToken">
select t.id, t.user_id, t.token, t.expire_time, t.create_time, t.update_time
from t_ds_access_token t
left join t_ds_user u on t.user_id = u.id
where 1 = 1
<if test="userName != null and userName != ''">
and u.user_name like concat ('%', #{userName}, '%')
</if>
<if test="userId != 0">
and t.user_id = #{userId}
</if>
order by t.update_time desc
</select>
</mapper>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,306 | [Bug][UI] The version information closing function does not take effect | **Describe the bug**
1. The close action of the version information panel on the workflow definition page does not take effect
2. The close action of the version information panel on the DAG chart page does not take effect
**To Reproduce**
1. Open the workflow definition page and click the version information button of a specific workflow definition
2. Click the Cancel button on the version information panel; it does not take effect
**Which version of Dolphin Scheduler:**
- [dev]
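A plausible cause is that both pages render `m-versions` inside an `el-drawer` but never listen for the cancel event the versions component emits, so nothing resets the drawer's visibility flag. A minimal sketch of a fix; the event name `closeVersion` is an assumption for illustration, not confirmed from the component source:

```js
// template sketch: forward the versions panel's cancel event to the parent page
// <m-versions :versionData="versionData" @closeVersion="closeVersion"></m-versions>

// methods sketch: flip the flag bound to the el-drawer's `visible` property
closeVersion () {
  this.drawer = false
}
```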
| https://github.com/apache/dolphinscheduler/issues/4306 | https://github.com/apache/dolphinscheduler/pull/4307 | ede78950836b77cceda0caafb0503a47b10fbbbd | 48b5fc6ae46a061346cf985ca0c0614f2e325bd3 | "2020-12-25T05:14:03Z" | java | "2020-12-26T06:03:41Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/dag.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="clearfix dag-model" >
<div class="toolbar">
<div class="title"><span>{{$t('Toolbar')}}</span></div>
<div class="toolbar-btn">
<div class="bar-box roundedRect jtk-draggable jtk-droppable jtk-endpoint-anchor jtk-connected"
:class="v === dagBarId ? 'active' : ''"
:id="v"
:key="v"
v-for="(item,v) in tasksTypeList"
@mousedown="_getDagId(v)">
<div data-toggle="tooltip" :title="item.desc">
<div class="icos" :class="'icos-' + v" ></div>
</div>
</div>
</div>
</div>
<div class="dag-contect">
<div class="dag-toolbar">
<div class="assist-btn">
<el-button
style="vertical-align: middle;"
data-toggle="tooltip"
:title="$t('View variables')"
data-container="body"
type="primary"
size="mini"
:disabled="$route.name !== 'projects-instance-details'"
@click="_toggleView"
icon="el-icon-c-scale-to-original">
</el-button>
<span>
<el-button
style="vertical-align: middle;"
data-toggle="tooltip"
:title="$t('Startup parameter')"
data-container="body"
type="primary"
size="mini"
:disabled="$route.name !== 'projects-instance-details'"
@click="_toggleParam"
icon="el-icon-arrow-right">
</el-button>
</span>
<span class="name">{{name}}</span>
<span v-if="name" class="copy-name" @click="_copyName" :data-clipboard-text="name"><em class="el-icon-copy-document" data-container="body" data-toggle="tooltip" :title="$t('Copy name')" ></em></span>
</div>
<div class="save-btn">
<div class="operation" style="vertical-align: middle;">
<a href="javascript:"
v-for="(item,$index) in toolOperList"
:class="_operationClass(item)"
:id="item.code"
:key="$index"
@click="_ckOperation(item,$event)">
<el-button type="text" class="operBtn" data-container="body" :icon="item.icon" v-tooltip.light="item.desc"></el-button>
</a>
</div>
<el-button
type="primary"
v-tooltip.light="$t('Format DAG')"
icon="el-icon-caret-right"
size="mini"
data-container="body"
              v-if="(type === 'instance' || type === 'definition') && urlParam.id != undefined"
style="vertical-align: middle;"
@click="dagAutomaticLayout">
</el-button>
<span>
<el-button
v-tooltip.light="$t('Refresh DAG status')"
data-container="body"
style="vertical-align: middle;"
icon="el-icon-refresh"
type="primary"
:loading="isRefresh"
v-if="type === 'instance'"
@click="!isRefresh && _refresh()"
size="mini" >
</el-button>
</span>
<el-button
v-if="isRtTasks"
style="vertical-align: middle;"
type="primary"
size="mini"
icon="el-icon-back"
@click="_rtNodesDag" >
{{$t('Return_1')}}
</el-button>
<span>
<el-button
type="primary"
v-tooltip.light="$t('Close')"
icon="el-icon-switch-button"
size="mini"
data-container="body"
              v-if="type === 'instance' || type === 'definition'"
style="vertical-align: middle;"
@click="_closeDAG">
{{$t('Close')}}
</el-button>
</span>
<el-button
style="vertical-align: middle;"
type="primary"
size="mini"
:loading="spinnerLoading"
@click="_saveChart"
icon="el-icon-document-checked"
>
{{spinnerLoading ? 'Loading...' : $t('Save')}}
</el-button>
<span>
<el-button
style="vertical-align: middle;"
type="primary"
size="mini"
:loading="spinnerLoading"
@click="_version"
icon="el-icon-info">
{{spinnerLoading ? 'Loading...' : $t('Version Info')}}
</el-button>
</span>
</div>
</div>
<div class="scrollbar dag-container">
<div class="jtk-demo" id="jtk-demo">
<div class="jtk-demo-canvas canvas-wide statemachine-demo jtk-surface jtk-surface-nopan jtk-draggable" id="canvas" ></div>
</div>
</div>
<el-drawer
:visible.sync="drawer"
size="35%"
:with-header="false">
        <m-versions :versionData="versionData" @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion"></m-versions>
</el-drawer>
<el-drawer
:visible.sync="nodeDrawer"
size="50%"
:with-header="false">
<m-form-model v-if="nodeDrawer" :nodeData=nodeData @seeHistory="seeHistory" @addTaskInfo="addTaskInfo" @cacheTaskInfo="cacheTaskInfo" @close="close" @onSubProcess="onSubProcess"></m-form-model>
</el-drawer>
<el-drawer
:visible.sync="lineDrawer"
size="50%"
:wrapperClosable="false"
:with-header="false">
<m-form-line-model :lineData = lineData @addLineInfo="addLineInfo" @cancel="cancel"></m-form-line-model>
</el-drawer>
<el-drawer
:visible.sync="udpDrawer"
size="50%"
:wrapperClosable="false"
:with-header="false">
<m-udp></m-udp>
</el-drawer>
<el-dialog
:title="$t('Set the DAG diagram name')"
:visible.sync="dialogVisible"
width="45%">
<m-udp @onUdp="onUdpDialog" @close="closeDialog"></m-udp>
</el-dialog>
<el-dialog
:title="$t('Please set the parameters before starting')"
:visible.sync="startDialog"
width="65%">
<m-start :startData= "startData" :startNodeList="startNodeList" :sourceType="sourceType" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start>
</el-dialog>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import Dag from './dag'
import mUdp from './udp/udp'
import i18n from '@/module/i18n'
import { jsPlumb } from 'jsplumb'
import Clipboard from 'clipboard'
import { allNodesId } from './plugIn/util'
import { toolOper, tasksType } from './config'
import mFormModel from './formModel/formModel'
import mFormLineModel from './formModel/formLineModel'
import { formatDate } from '@/module/filter/filter'
import { findComponentDownward } from '@/module/util/'
import disabledState from '@/module/mixin/disabledState'
import { mapActions, mapState, mapMutations } from 'vuex'
import mStart from '../../projects/pages/definition/pages/list/_source/start'
import mVersions from '../../projects/pages/definition/pages/list/_source/versions'
let eventModel
export default {
name: 'dag-chart',
data () {
return {
tasksTypeList: tasksType,
toolOperList: toolOper(this),
dagBarId: null,
toolOperCode: '',
spinnerLoading: false,
urlParam: {
id: this.$route.params.id || null
},
isRtTasks: false,
isRefresh: false,
isLoading: false,
taskId: null,
arg: false,
versionData: {
processDefinition: {
id: null,
version: '',
state: ''
},
processDefinitionVersions: [],
total: null,
pageNo: null,
pageSize: null
},
drawer: false,
nodeData: {
id: null,
taskType: '',
self: {},
preNode: [],
rearList: [],
instanceId: null
},
nodeDrawer: false,
lineData: {
id: null,
sourceId: '',
targetId: ''
},
lineDrawer: false,
udpDrawer: false,
dialogVisible: false,
startDialog: false,
startData: {},
startNodeList: '',
sourceType: ''
}
},
mixins: [disabledState],
props: {
type: String,
releaseState: String
},
methods: {
...mapActions('dag', ['saveDAGchart', 'updateInstance', 'updateDefinition', 'getTaskState', 'switchProcessDefinitionVersion', 'getProcessDefinitionVersionsPage', 'deleteProcessDefinitionVersion']),
...mapMutations('dag', ['addTasks', 'cacheTasks', 'resetParams', 'setIsEditDag', 'setName', 'addConnects']),
startRunning (item, startNodeList, sourceType) {
this.startData = item
this.startNodeList = startNodeList
this.sourceType = sourceType
this.startDialog = true
},
onUpdateStart () {
this.startDialog = false
},
closeStart () {
this.startDialog = false
},
// DAG automatic layout
dagAutomaticLayout () {
if (this.store.state.dag.isEditDag) {
this.$message.warning(`${i18n.$t('Please save the DAG before formatting')}`)
return false
}
      $('#canvas').html('')
      // Re-initialize jsPlumb on the cleared canvas
Dag.init({
dag: this,
instance: jsPlumb.getInstance({
Endpoint: [
'Dot', { radius: 1, cssClass: 'dot-style' }
],
Connector: 'Bezier',
PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style
ConnectionOverlays: [
[
'Arrow',
{
location: 1,
id: 'arrow',
length: 12,
foldback: 0.8
}
],
['Label', {
location: 0.5,
id: 'label'
}]
],
Container: 'canvas',
ConnectionsDetachable: true
})
})
if (this.tasks.length) {
Dag.backfill(true)
if (this.type === 'instance') {
this._getTaskState(false).then(res => {})
}
} else {
Dag.create()
}
},
init (args) {
if (this.tasks.length) {
Dag.backfill(args)
// Process instances can view status
if (this.type === 'instance') {
this._getTaskState(false).then(res => {})
          // Poll the task state periodically
this.setIntervalP = setInterval(() => {
this._getTaskState(true).then(res => {})
}, 90000)
}
} else {
Dag.create()
}
},
/**
* copy name
*/
_copyName () {
let clipboard = new Clipboard('.copy-name')
clipboard.on('success', e => {
this.$message.success(`${i18n.$t('Copy success')}`)
// Free memory
clipboard.destroy()
})
clipboard.on('error', e => {
// Copy is not supported
this.$message.warning(`${i18n.$t('The browser does not support automatic copying')}`)
// Free memory
clipboard.destroy()
})
},
/**
* Get state interface
* @param isReset Whether to manually refresh
*/
_getTaskState (isReset) {
return new Promise((resolve, reject) => {
this.getTaskState(this.urlParam.id).then(res => {
let data = res.list
let state = res.processInstanceState
let taskList = res.taskList
let idArr = allNodesId()
const titleTpl = (item, desc) => {
let $item = _.filter(taskList, v => v.name === item.name)[0]
return `<div style="text-align: left">${i18n.$t('Name')}:${$item.name}</br>${i18n.$t('State')}:${desc}</br>${i18n.$t('type')}:${$item.taskType}</br>${i18n.$t('host')}:${$item.host || '-'}</br>${i18n.$t('Retry Count')}:${$item.retryTimes}</br>${i18n.$t('Submit Time')}:${formatDate($item.submitTime)}</br>${i18n.$t('Start Time')}:${formatDate($item.startTime)}</br>${i18n.$t('End Time')}:${$item.endTime ? formatDate($item.endTime) : '-'}</br></div>`
}
// remove tip state dom
$('.w').find('.state-p').html('')
data.forEach(v1 => {
idArr.forEach(v2 => {
if (v2.name === v1.name) {
let dom = $(`#${v2.id}`)
let state = dom.find('.state-p')
let depState = ''
taskList.forEach(item => {
if (item.name === v1.name) {
depState = item.state
}
})
dom.attr('data-state-id', v1.stateId)
dom.attr('data-dependent-result', v1.dependentResult || '')
dom.attr('data-dependent-depState', depState)
state.append(`<strong class="${v1.icoUnicode} ${v1.isSpin ? 'as as-spin' : ''}" style="color:${v1.color}" data-toggle="tooltip" data-html="true" data-container="body"></strong>`)
state.find('strong').attr('title', titleTpl(v2, v1.desc))
}
})
})
          if (state === 'PAUSE' || state === 'STOP' || state === 'FAILURE' || state === 'SUCCESS') {
// Manual refresh does not regain large json
if (isReset) {
findComponentDownward(this.$root, `${this.type}-details`)._reset()
}
}
resolve()
})
})
},
/**
* Get the action bar id
* @param item
*/
_getDagId (v) {
// if (this.isDetails) {
// return
// }
this.dagBarId = v
},
/**
* operating
*/
_ckOperation (item) {
let is = true
let code = ''
if (item.disable) {
return
}
if (this.toolOperCode === item.code) {
this.toolOperCode = ''
code = item.code
is = false
} else {
this.toolOperCode = item.code
code = this.toolOperCode
is = true
}
// event type
Dag.toolbarEvent({
item: item,
code: code,
is: is
})
},
_operationClass (item) {
return this.toolOperCode === item.code ? 'active' : ''
// if (item.disable) {
// return this.toolOperCode === item.code ? 'active' : ''
// } else {
// return 'disable'
// }
},
/**
* Storage interface
*/
_save (sourceType) {
return new Promise((resolve, reject) => {
this.spinnerLoading = true
// Storage store
Dag.saveStore().then(res => {
if (this._verifConditions(res.tasks)) {
if (this.urlParam.id) {
/**
* Edit
* @param saveInstanceEditDAGChart => Process instance editing
* @param saveEditDAGChart => Process definition editing
*/
this[this.type === 'instance' ? 'updateInstance' : 'updateDefinition'](this.urlParam.id).then(res => {
// this.$message.success(res.msg)
this.$message({
message: res.msg,
type: 'success',
offset: 80
})
this.spinnerLoading = false
// Jump process definition
if (this.type === 'instance') {
this.$router.push({ path: `/projects/instance/list/${this.urlParam.id}?_t=${new Date().getTime()}` })
} else {
this.$router.push({ path: `/projects/definition/list/${this.urlParam.id}?_t=${new Date().getTime()}` })
}
resolve()
}).catch(e => {
this.$message.error(e.msg || '')
this.spinnerLoading = false
reject(e)
})
} else {
// New
this.saveDAGchart().then(res => {
this.$message.success(res.msg)
this.spinnerLoading = false
// source @/conf/home/pages/dag/_source/editAffirmModel/index.js
if (sourceType !== 'affirm') {
// Jump process definition
this.$router.push({ name: 'projects-definition-list' })
}
resolve()
}).catch(e => {
this.$message.error(e.msg || '')
this.setName('')
this.spinnerLoading = false
reject(e)
})
}
}
})
})
},
_closeDAG () {
let $name = this.$route.name
if ($name && $name.indexOf('definition') !== -1) {
this.$router.push({ name: 'projects-definition-list' })
} else {
this.$router.push({ name: 'projects-instance-list' })
}
},
_verifConditions (value) {
let tasks = value
let bool = true
      tasks.forEach(v => {
        if (v.type === 'CONDITIONS' && (v.conditionResult.successNode[0] === '' || v.conditionResult.successNode[0] === null || v.conditionResult.failedNode[0] === '' || v.conditionResult.failedNode[0] === null)) {
          bool = false
        }
      })
if (!bool) {
this.$message.warning(`${i18n.$t('Successful branch flow and failed branch flow are required')}`)
this.spinnerLoading = false
return false
}
return true
},
onUdpDialog () {
this._save()
this.dialogVisible = false
},
closeDialog () {
this.dialogVisible = false
},
/**
* Save chart
*/
_saveChart () {
// Verify node
if (!this.tasks.length) {
this.$message.warning(`${i18n.$t('Failed to create node to save')}`)
return
}
this.dialogVisible = true
},
/**
* Return to the previous child node
*/
_rtNodesDag () {
let getIds = this.$route.query.subProcessIds
let idsArr = getIds.split(',')
let ids = idsArr.slice(0, idsArr.length - 1)
let id = idsArr[idsArr.length - 1]
let query = {}
if (id !== idsArr[0]) {
query = { subProcessIds: ids.join(',') }
}
let $name = this.$route.name.split('-')
this.$router.push({ path: `/${$name[0]}/${$name[1]}/list/${id}`, query: query })
},
/**
* Subprocess processing
* @param subProcessId Subprocess ID
*/
_subProcessHandle (subProcessId) {
let subProcessIds = []
let getIds = this.$route.query.subProcessIds
if (getIds) {
let newId = getIds.split(',')
newId.push(this.urlParam.id)
subProcessIds = newId
} else {
subProcessIds.push(this.urlParam.id)
}
let $name = this.$route.name.split('-')
this.$router.push({ path: `/${$name[0]}/${$name[1]}/list/${subProcessId}`, query: { subProcessIds: subProcessIds.join(',') } })
},
/**
* Refresh data
*/
_refresh () {
this.isRefresh = true
this._getTaskState(false).then(res => {
setTimeout(() => {
this.isRefresh = false
this.$message.success(`${i18n.$t('Refresh status succeeded')}`)
}, 2200)
})
},
/**
* View variables
*/
_toggleView () {
findComponentDownward(this.$root, 'assist-dag-index')._toggleView()
},
/**
* Starting parameters
*/
_toggleParam () {
findComponentDownward(this.$root, 'starting-params-dag-index')._toggleParam()
},
addLineInfo ({ item, fromThis }) {
this.addConnects(item)
this.lineDrawer = false
},
cancel ({ fromThis }) {
this.lineDrawer = false
},
/**
* Create a node popup layer
* @param Object id
*/
_createLineLabel ({ id, sourceId, targetId }) {
this.lineData.id = id
this.lineData.sourceId = sourceId
this.lineData.targetId = targetId
this.lineDrawer = true
},
seeHistory (taskName) {
this.nodeData.self.$router.push({
name: 'task-instance',
query: {
processInstanceId: this.nodeData.self.$route.params.id,
taskName: taskName
}
})
},
addTaskInfo ({ item, fromThis }) {
this.addTasks(item)
this.nodeDrawer = false
},
cacheTaskInfo ({ item, fromThis }) {
this.cacheTasks(item)
},
close ({ item, flag, fromThis }) {
this.addTasks(item)
// Edit status does not allow deletion of nodes
if (flag) {
jsPlumb.remove(this.nodeData.id)
}
this.nodeDrawer = false
},
onSubProcess ({ subProcessId, fromThis }) {
this._subProcessHandle(subProcessId)
},
_createNodes ({ id, type }) {
let self = this
let preNode = []
let rearNode = []
let rearList = []
$('div[data-targetarr*="' + id + '"]').each(function () {
rearNode.push($(this).attr('id'))
})
if (rearNode.length > 0) {
rearNode.forEach(v => {
let rearobj = {}
rearobj.value = $(`#${v}`).find('.name-p').text()
rearobj.label = $(`#${v}`).find('.name-p').text()
rearList.push(rearobj)
})
} else {
rearList = []
}
let targetarr = $(`#${id}`).attr('data-targetarr')
if (targetarr) {
let nodearr = targetarr.split(',')
nodearr.forEach(v => {
let nodeobj = {}
nodeobj.value = $(`#${v}`).find('.name-p').text()
nodeobj.label = $(`#${v}`).find('.name-p').text()
preNode.push(nodeobj)
})
} else {
preNode = []
}
this.taskId = id
type = type || self.dagBarId
this.nodeData.id = id
this.nodeData.taskType = type
this.nodeData.self = self
this.nodeData.preNode = preNode
this.nodeData.rearList = rearList
this.nodeData.instanceId = this.$route.params.id
this.nodeDrawer = true
},
removeEventModelById ($id) {
if (eventModel && this.taskId === $id) {
eventModel.remove()
}
},
/**
* switch version in process definition version list
*
* @param version the version user want to change
* @param processDefinitionId the process definition id
* @param fromThis fromThis
*/
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
this.$store.state.dag.isSwitchVersion = true
this.switchProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
        this.$message.success(i18n.$t('Switch Version Successfully'))
this.$router.push({ path: `/projects/definition/list/${processDefinitionId}?_t=${new Date().getTime()}` })
}).catch(e => {
this.$store.state.dag.isSwitchVersion = false
this.$message.error(e.msg || '')
})
},
/**
* Paging event of process definition versions
*
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId the process definition id of page version
* @param fromThis fromThis
*/
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
this.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
processDefinitionId: processDefinitionId
}).then(res => {
this.versionData.processDefinitionVersions = res.data.lists
this.versionData.total = res.data.totalCount
this.versionData.pageSize = res.data.pageSize
this.versionData.pageNo = res.data.currentPage
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* delete one version of process definition
*
* @param version the version need to delete
* @param processDefinitionId the process definition id user want to delete
* @param fromThis fromThis
*/
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
this.deleteProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
this.$message.success(res.msg || '')
this.mVersionGetProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: processDefinitionId,
fromThis: fromThis
})
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* query the process definition pagination version
*/
_version (item) {
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: this.urlParam.id
}).then(res => {
let processDefinitionVersions = res.data.lists
let total = res.data.totalCount
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
this.versionData.processDefinition.id = this.urlParam.id
this.versionData.processDefinition.version = this.$store.state.dag.version
this.versionData.processDefinition.state = this.releaseState
this.versionData.processDefinitionVersions = processDefinitionVersions
this.versionData.total = total
this.versionData.pageNo = pageNo
this.versionData.pageSize = pageSize
this.drawer = true
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
watch: {
tasks: {
deep: true,
handler (o) {
        // Any change to tasks marks the DAG as edited (edit state does not allow node deletion)
this.setIsEditDag(true)
}
}
},
created () {
    // Reset the edited flag when the component is created
this.setIsEditDag(false)
if (this.$route.query.subProcessIds) {
this.isRtTasks = true
}
Dag.init({
dag: this,
instance: jsPlumb.getInstance({
Endpoint: [
'Dot', { radius: 1, cssClass: 'dot-style' }
],
Connector: 'Bezier',
PaintStyle: { lineWidth: 2, stroke: '#456' }, // Connection style
ConnectionOverlays: [
[
'Arrow',
{
location: 1,
id: 'arrow',
length: 12,
foldback: 0.8
}
],
['Label', {
location: 0.5,
id: 'label'
}]
],
Container: 'canvas',
ConnectionsDetachable: true
})
})
},
mounted () {
this.init(this.arg)
},
beforeDestroy () {
this.resetParams()
// Destroy round robin
clearInterval(this.setIntervalP)
},
destroyed () {
if (eventModel) {
eventModel.remove()
}
},
computed: {
...mapState('dag', ['tasks', 'locations', 'connects', 'isEditDag', 'name'])
},
components: { mVersions, mFormModel, mFormLineModel, mUdp, mStart }
}
</script>
<style lang="scss" rel="stylesheet/scss">
@import "./dag";
.operBtn {
padding: 8px 20px;
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,306 | [Bug][UI] The version information closing function does not take effect | **Describe the bug**
1. The close action of the version information panel on the workflow definition page does not take effect
2. The close action of the version information panel on the DAG chart page does not take effect
**To Reproduce**
1. Enter the workflow definition page and click the version information button of a specific workflow definition
2. Click the Cancel button on the version information panel; the panel does not close
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4306 | https://github.com/apache/dolphinscheduler/pull/4307 | ede78950836b77cceda0caafb0503a47b10fbbbd | 48b5fc6ae46a061346cf985ca0c0614f2e325bd3 | "2020-12-25T05:14:03Z" | java | "2020-12-26T06:03:41Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model" style="position: relative;">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%" @selection-change="_arrDelChange">
<el-table-column type="selection" width="50" :selectable="selectable"></el-table-column>
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('Process Name')" min-width="200">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.name }}</p>
<div slot="reference" class="name-wrapper">
<router-link :to="{ path: '/projects/definition/list/' + scope.row.id}" tag="a" class="links" :title="scope.row.name">
{{scope.row.name}}
</router-link>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column :label="$t('State')">
<template slot-scope="scope">
{{_rtPublishStatus(scope.row.releaseState)}}
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" width="135">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" width="135">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Description')">
<template slot-scope="scope">
<span>{{scope.row.description | filterNull}}</span>
</template>
</el-table-column>
<el-table-column prop="modifyBy" :label="$t('Modify User')"></el-table-column>
<el-table-column :label="$t('Timing state')">
<template slot-scope="scope">
<span v-if="scope.row.scheduleReleaseState === 'OFFLINE'">{{$t('offline')}}</span>
<span v-if="scope.row.scheduleReleaseState === 'ONLINE'">{{$t('online')}}</span>
<span v-if="!scope.row.scheduleReleaseState">-</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="335" fixed="right">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-edit-outline" :disabled="scope.row.releaseState === 'ONLINE'" @click="_edit(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Start')" placement="top" :enterable="false">
<span><el-button type="success" size="mini" :disabled="scope.row.releaseState !== 'ONLINE'" icon="el-icon-video-play" @click="_start(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Timing')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-time" :disabled="scope.row.releaseState !== 'ONLINE' || scope.row.scheduleReleaseState !== null" @click="_timing(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('online')" placement="top" :enterable="false">
<span><el-button type="warning" size="mini" v-if="scope.row.releaseState === 'OFFLINE'" icon="el-icon-upload2" @click="_poponline(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('offline')" placement="top" :enterable="false">
<el-button type="danger" size="mini" icon="el-icon-download" v-if="scope.row.releaseState === 'ONLINE'" @click="_downline(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Copy Workflow')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" :disabled="scope.row.releaseState === 'ONLINE'" icon="el-icon-document-copy" @click="_copyProcess(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Cron Manage')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-date" :disabled="scope.row.releaseState !== 'ONLINE'" @click="_timingManage(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-button type="danger" size="mini" icon="el-icon-delete" circle></el-button>
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" :disabled="scope.row.releaseState === 'ONLINE'" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
<el-tooltip :content="$t('TreeView')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-s-data" @click="_treeView(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Export')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-s-unfold" @click="_export(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Version Info')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-info" :disabled="scope.row.releaseState === 'ONLINE'" @click="_version(scope.row)" circle></el-button>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
:title="$t('Delete?')"
@onConfirm="_delete({},-1)"
>
<el-button style="position: absolute; bottom: -48px; left: 19px;" type="primary" size="mini" :disabled="!strSelectIds" slot="reference">{{$t('Delete')}}</el-button>
</el-popconfirm>
</el-tooltip>
<el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 80px;" @click="_batchExport(item)" >{{$t('Export')}}</el-button>
<span><el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 140px;" @click="_batchCopy(item)" >{{$t('Batch copy')}}</el-button></span>
<el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 225px;" @click="_batchMove(item)" >{{$t('Batch move')}}</el-button>
<el-drawer
:visible.sync="drawer"
size="35%"
:with-header="false">
<m-versions :versionData="versionData" @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion" @close="closeVersion"></m-versions>
</el-drawer>
<el-dialog
:title="$t('Please set the parameters before starting')"
:visible.sync="startDialog"
width="65%">
<m-start :startData= "startData" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start>
</el-dialog>
<el-dialog
:title="$t('Set parameters before timing')"
:visible.sync="timingDialog"
width="65%">
<m-timing :timingData="timingData" @onUpdateTiming="onUpdateTiming" @closeTiming="closeTiming"></m-timing>
</el-dialog>
<el-dialog
title="提示"
:visible.sync="relatedItemsDialog"
width="30%">
<m-related-items :tmp="tmp" @onBatchCopy="onBatchCopy" @onBatchMove="onBatchMove" @closeRelatedItems="closeRelatedItems"></m-related-items>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import mStart from './start'
import mTiming from './timing'
import mRelatedItems from './relatedItems'
import { mapActions } from 'vuex'
import { publishStatus } from '@/conf/home/pages/dag/_source/config'
import mVersions from './versions'
export default {
name: 'definition-list',
data () {
return {
list: [],
strSelectIds: '',
checkAll: false,
drawer: false,
versionData: {
processDefinition: {},
processDefinitionVersions: [],
total: null,
pageNo: null,
pageSize: null
},
startDialog: false,
startData: {},
timingDialog: false,
timingData: {
item: {},
receiversD: [],
receiversCcD: [],
type: ''
},
relatedItemsDialog: false,
tmp: false
}
},
props: {
processList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('dag', ['editProcessState', 'getStartCheck', 'getReceiver', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion', 'moveProcess']),
...mapActions('security', ['getWorkerGroupsAll']),
selectable (row, index) {
if (row.releaseState === 'ONLINE') {
return false
} else {
return true
}
},
_rtPublishStatus (code) {
return _.filter(publishStatus, v => v.code === code)[0].desc
},
_treeView (item) {
this.$router.push({ path: `/projects/definition/tree/${item.id}` })
},
/**
* Start
*/
_start (item) {
this.getWorkerGroupsAll()
this.getStartCheck({ processDefinitionId: item.id }).then(res => {
this.startData = item
this.startDialog = true
}).catch(e => {
this.$message.error(e.msg || '')
})
},
onUpdateStart () {
this._onUpdate()
this.startDialog = false
},
closeStart () {
this.startDialog = false
},
/**
* get email
*/
_getReceiver (id) {
return new Promise((resolve, reject) => {
this.getReceiver({ processDefinitionId: id }).then(res => {
resolve({
receivers: res.receivers && res.receivers.split(',') || [],
receiversCc: res.receiversCc && res.receiversCc.split(',') || []
})
})
})
},
/**
* timing
*/
_timing (item) {
this._getReceiver(item.id).then(res => {
this.timingData.item = item
this.timingData.receiversD = res.receivers
this.timingData.receiversCcD = res.receiversCc
this.timingData.type = 'timing'
this.timingDialog = true
})
},
onUpdateTiming () {
this._onUpdate()
this.timingDialog = false
},
closeTiming () {
this.timingDialog = false
},
/**
* Timing manage
*/
_timingManage (item) {
this.$router.push({ path: `/projects/definition/list/timing/${item.id}` })
},
/**
* delete
*/
_delete (item, i) {
// i < 0 indicates a batch delete of the selected rows
if (i < 0) {
this._batchDelete()
return
}
// remove one
this.deleteDefinition({
processDefinitionId: item.id
}).then(res => {
this._onUpdate()
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* edit
*/
_edit (item) {
this.$router.push({ path: `/projects/definition/list/${item.id}` })
},
/**
* Offline
*/
_downline (item) {
this._upProcessState({
processId: item.id,
releaseState: 0
})
},
/**
* online
*/
_poponline (item) {
this._upProcessState({
processId: item.id,
releaseState: 1
})
},
/**
* copy
*/
_copyProcess (item) {
this.copyProcess({
processDefinitionIds: item.id,
targetProjectId: item.projectId
}).then(res => {
this.strSelectIds = ''
this.$message.success(res.msg)
// $('body').find('.tooltip.fade.top.in').remove()
this._onUpdate()
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* move
*/
_moveProcess (item) {
this.moveProcess({
processDefinitionIds: item.id,
targetProjectId: item.projectId
}).then(res => {
this.strSelectIds = ''
this.$message.success(res.msg)
$('body').find('.tooltip.fade.top.in').remove()
this._onUpdate()
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_export (item) {
this.exportDefinition({
processDefinitionIds: item.id,
fileName: item.name
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* switch version in process definition version list
*
* @param version the version user want to change
* @param processDefinitionId the process definition id
* @param fromThis fromThis
*/
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
this.switchProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
this.$message.success(this.$t('Switch Version Successfully'))
this.$router.push({ path: `/projects/definition/list/${processDefinitionId}` })
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* Paging event of process definition versions
*
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId the process definition id of page version
* @param fromThis fromThis
*/
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
this.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
processDefinitionId: processDefinitionId
}).then(res => {
this.versionData.processDefinitionVersions = res.data.lists
this.versionData.total = res.data.totalCount
this.versionData.pageSize = res.data.pageSize
this.versionData.pageNo = res.data.currentPage
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* delete one version of process definition
*
* @param version the version need to delete
* @param processDefinitionId the process definition id user want to delete
* @param fromThis fromThis
*/
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
this.deleteProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
this.$message.success(res.msg || '')
this.mVersionGetProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: processDefinitionId,
fromThis: fromThis
})
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_version (item) {
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: item.id
}).then(res => {
let processDefinitionVersions = res.data.lists
let total = res.data.totalCount
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
this.versionData.processDefinition = item
this.versionData.processDefinitionVersions = processDefinitionVersions
this.versionData.total = total
this.versionData.pageNo = pageNo
this.versionData.pageSize = pageSize
this.drawer = true
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
 * close the version drawer; handles the 'close' event emitted by m-versions
 */
closeVersion () {
  this.drawer = false
},
_batchExport () {
this.exportDefinition({
processDefinitionIds: this.strSelectIds,
fileName: 'process_' + new Date().getTime()
}).then(res => {
this._onUpdate()
this.checkAll = false
this.strSelectIds = ''
}).catch(e => {
this.strSelectIds = ''
this.checkAll = false
this.$message.error(e.msg)
})
},
/**
* Batch Copy
*/
_batchCopy () {
this.relatedItemsDialog = true
this.tmp = false
},
onBatchCopy (item) {
this._copyProcess({ id: this.strSelectIds, projectId: item })
this.relatedItemsDialog = false
},
closeRelatedItems () {
this.relatedItemsDialog = false
},
/**
* _batchMove
*/
_batchMove () {
this.tmp = true
this.relatedItemsDialog = true
},
onBatchMove (item) {
this._moveProcess({ id: this.strSelectIds, projectId: item })
this.relatedItemsDialog = false
},
/**
* Edit state
*/
_upProcessState (o) {
this.editProcessState(o).then(res => {
this.$message.success(res.msg)
$('body').find('.tooltip.fade.top.in').remove()
this._onUpdate()
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_onUpdate () {
this.$emit('on-update')
},
/**
* the array that to be delete
*/
_arrDelChange (v) {
let arr = []
arr = _.map(v, 'id')
this.strSelectIds = _.join(arr, ',')
},
/**
* batch delete
*/
_batchDelete () {
this.batchDeleteDefinition({
processDefinitionIds: this.strSelectIds
}).then(res => {
this._onUpdate()
this.checkAll = false
this.strSelectIds = ''
this.$message.success(res.msg)
}).catch(e => {
this.strSelectIds = ''
this.checkAll = false
this.$message.error(e.msg || '')
})
}
},
watch: {
processList: {
handler (a) {
this.checkAll = false
this.list = []
setTimeout(() => {
this.list = _.cloneDeep(a)
})
},
immediate: true,
deep: true
},
pageNo () {
this.strSelectIds = ''
}
},
created () {
},
mounted () {
},
components: { mVersions, mStart, mTiming, mRelatedItems }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,306 | [Bug][UI] The version information closing function does not take effect | **Describe the bug**
1. The close action of the version information panel on the workflow definition page does not take effect
2. The close action of the version information panel on the DAG chart page does not take effect
**To Reproduce**
1. Enter the workflow definition page and click the version information button of a specific workflow definition
2. Click the Cancel button on the version information panel; the panel does not close
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4306 | https://github.com/apache/dolphinscheduler/pull/4307 | ede78950836b77cceda0caafb0503a47b10fbbbd | 48b5fc6ae46a061346cf985ca0c0614f2e325bd3 | "2020-12-25T05:14:03Z" | java | "2020-12-26T06:03:41Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/versions.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="container">
<div class="title-box">
<span class="name">{{$t('Version Info')}}</span>
</div>
<div class="table-box" v-if="versionData.processDefinitionVersions.length > 0">
<el-table :data="versionData.processDefinitionVersions" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="userName" :label="$t('Version')">
<template slot-scope="scope">
<span v-if="scope.row.version">
<span v-if="scope.row.version === versionData.processDefinition.version" style="color: green"><strong>{{scope.row.version}} {{$t('Current Version')}}</strong></span>
<span v-else>{{scope.row.version}}</span>
</span>
<span v-else>-</span>
</template>
</el-table-column>
<el-table-column prop="description" :label="$t('Description')"></el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="100">
<template slot-scope="scope">
<el-tooltip :content="$t('Switch To This Version')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Confirm Switch To This Version?')"
@onConfirm="_mVersionSwitchProcessDefinitionVersion(scope.row)"
>
<el-button type="primary" size="mini" icon="el-icon-warning" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_mVersionDeleteProcessDefinitionVersion(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<div v-if="versionData.processDefinitionVersions.length === 0">
<m-no-data><!----></m-no-data>
</div>
<div v-if="versionData.processDefinitionVersions.length > 0">
<div class="bottom-box">
<el-pagination
style="float:right"
background
@current-change="_mVersionGetProcessDefinitionVersionsPage"
layout="prev, pager, next"
:total="versionData.total">
</el-pagination>
<el-button type="text" size="mini" @click="_close()" style="float:right">{{$t('Cancel')}}</el-button>
</div>
</div>
</div>
</template>
<script>
import mNoData from '@/module/components/noData/noData'
export default {
name: 'versions',
data () {
return {
tableHeaders: [
{
label: 'version',
prop: 'version'
},
{
label: 'createTime',
prop: 'createTime'
}
]
}
},
props: {
versionData: Object
},
methods: {
/**
* switch version in process definition version list
*/
_mVersionSwitchProcessDefinitionVersion (item) {
this.$emit('mVersionSwitchProcessDefinitionVersion', {
version: item.version,
processDefinitionId: this.versionData.processDefinition.id,
fromThis: this
})
},
/**
* delete one version of process definition
*/
_mVersionDeleteProcessDefinitionVersion (item) {
this.$emit('mVersionDeleteProcessDefinitionVersion', {
version: item.version,
processDefinitionId: this.versionData.processDefinition.id,
fromThis: this
})
},
/**
* Paging event of process definition versions
*/
_mVersionGetProcessDefinitionVersionsPage (val) {
this.$emit('mVersionGetProcessDefinitionVersionsPage', {
pageNo: val,
pageSize: this.versionData.pageSize,
processDefinitionId: this.versionData.processDefinition.id,
fromThis: this
})
},
/**
* Close and destroy component and component internal events
*/
_close () {
// notify the parent component so it can close the drawer
this.$emit('close', {
fromThis: this
})
}
},
created () {
},
mounted () {
},
components: { mNoData }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.container {
width: 500px;
position: relative;
.title-box {
height: 61px;
border-bottom: 1px solid #DCDEDC;
position: relative;
.name {
position: absolute;
left: 24px;
top: 18px;
font-size: 16px;
}
}
.bottom-box {
position: absolute;
bottom: 0;
left: 0;
width: 100%;
text-align: right;
height: 60px;
line-height: 60px;
border-top: 1px solid #DCDEDC;
background: #fff;
.ans-page {
display: inline-block;
}
}
.table-box {
overflow-y: scroll;
height: calc(100vh - 61px);
padding-bottom: 60px;
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/plugin/AlertPluginManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.alert.plugin;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static com.google.common.base.Preconditions.checkState;
import org.apache.dolphinscheduler.dao.entity.PluginDefine;
import org.apache.dolphinscheduler.spi.DolphinSchedulerPlugin;
import org.apache.dolphinscheduler.spi.alert.AlertChannel;
import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory;
import org.apache.dolphinscheduler.spi.classloader.ThreadContextClassLoader;
import org.apache.dolphinscheduler.spi.params.PluginParamsTransfer;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* load the configured alert plugin and manager them
*/
public class AlertPluginManager extends AbstractDolphinPluginManager {
private static final Logger logger = LoggerFactory.getLogger(AlertPluginManager.class);
private final Map<String, AlertChannelFactory> alertChannelFactoryMap = new ConcurrentHashMap<>();
private final Map<String, AlertChannel> alertChannelMap = new ConcurrentHashMap<>();
public void addAlertChannelFactory(AlertChannelFactory alertChannelFactory) {
requireNonNull(alertChannelFactory, "alertChannelFactory is null");
if (alertChannelFactoryMap.putIfAbsent(alertChannelFactory.getName(), alertChannelFactory) != null) {
throw new IllegalArgumentException(format("Alert Plugin '{}' is already registered", alertChannelFactory.getName()));
}
try {
loadAlertChannel(alertChannelFactory.getName());
} catch (Exception e) {
throw new IllegalArgumentException(format("Alert Plugin '{}' is can not load .", alertChannelFactory.getName()));
}
}
protected void loadAlertChannel(String name) {
requireNonNull(name, "name is null");
AlertChannelFactory alertChannelFactory = alertChannelFactoryMap.get(name);
checkState(alertChannelFactory != null, "Alert Plugin %s is not registered", name);
try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(alertChannelFactory.getClass().getClassLoader())) {
AlertChannel alertChannel = alertChannelFactory.create();
this.alertChannelMap.put(name, alertChannel);
}
logger.info("-- Loaded Alert Plugin {} --", name);
}
public Map<String, AlertChannelFactory> getAlertChannelFactoryMap() {
return alertChannelFactoryMap;
}
public Map<String, AlertChannel> getAlertChannelMap() {
return alertChannelMap;
}
@Override
public void installPlugin(DolphinSchedulerPlugin dolphinSchedulerPlugin) {
for (AlertChannelFactory alertChannelFactory : dolphinSchedulerPlugin.getAlertChannelFactorys()) {
logger.info("Registering Alert Plugin '{}'", alertChannelFactory.getName());
this.addAlertChannelFactory(alertChannelFactory);
List<PluginParams> params = alertChannelFactory.getParams();
String nameEn = alertChannelFactory.getName();
String paramsJson = PluginParamsTransfer.transferParamsToJson(params);
PluginDefine pluginDefine = new PluginDefine(nameEn, "alert", paramsJson);
pluginDao.addOrUpdatePluginDefine(pluginDefine);
}
}
}
|
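To make the registration flow above concrete, below is a minimal compilable sketch of a factory that `installPlugin` could register. The `getName`/`getParams`/`create` signatures are inferred from their usage in `AlertPluginManager`; the `DemoAlertChannelFactory` class itself is hypothetical and deliberately returns no real channel or form fields.

```java
import java.util.Collections;
import java.util.List;

import org.apache.dolphinscheduler.spi.alert.AlertChannel;
import org.apache.dolphinscheduler.spi.alert.AlertChannelFactory;
import org.apache.dolphinscheduler.spi.params.base.PluginParams;

public class DemoAlertChannelFactory implements AlertChannelFactory {

    @Override
    public String getName() {
        // unique key under which AlertPluginManager registers this factory
        return "demo";
    }

    @Override
    public List<PluginParams> getParams() {
        // installPlugin() serializes this list with
        // PluginParamsTransfer.transferParamsToJson and stores it in
        // t_ds_plugin_define, so form-create can render a form from it;
        // a real plugin returns its input field definitions here
        return Collections.emptyList();
    }

    @Override
    public AlertChannel create() {
        // a real factory returns its AlertChannel implementation here
        throw new UnsupportedOperationException("demo sketch only");
    }
}
```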
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/UiPluginController.java |
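`UiPluginController.java` is still empty in this pre-fix snapshot. A plausible minimal shape, serving the issue's goal of handing the stored `pluginParams` JSON to the frontend, is sketched below; the REST paths, the direct mapper wiring, and the method name are assumptions rather than the actual code of PR #4311 (`queryByPluginType` is the mapper method shown later in this record set).

```java
import java.util.List;

import org.apache.dolphinscheduler.dao.entity.PluginDefine;
import org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("ui/plugins")
public class UiPluginController {

    @Autowired
    private PluginDefineMapper pluginDefineMapper;

    /**
     * return all plugin defines of one type (e.g. "alert"); each row carries
     * the pluginParams JSON that form-create renders into a dynamic form
     */
    @GetMapping("query-by-type")
    public List<PluginDefine> queryUiPluginsByType(@RequestParam("pluginType") String pluginType) {
        return pluginDefineMapper.queryByPluginType(pluginType);
    }
}
```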
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/enums/Status.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.enums;
import java.util.Locale;
import org.springframework.context.i18n.LocaleContextHolder;
/**
* status enum
*/
public enum Status {
SUCCESS(0, "success", "成功"),
INTERNAL_SERVER_ERROR_ARGS(10000, "Internal Server Error: {0}", "服务端异常: {0}"),
REQUEST_PARAMS_NOT_VALID_ERROR(10001, "request parameter {0} is not valid", "请求参数[{0}]无效"),
TASK_TIMEOUT_PARAMS_ERROR(10002, "task timeout parameter is not valid", "任务超时参数无效"),
USER_NAME_EXIST(10003, "user name already exists", "用户名已存在"),
USER_NAME_NULL(10004, "user name is null", "用户名不能为空"),
HDFS_OPERATION_ERROR(10006, "hdfs operation error", "hdfs操作错误"),
TASK_INSTANCE_NOT_FOUND(10008, "task instance not found", "任务实例不存在"),
TENANT_NAME_EXIST(10009, "tenant code {0} already exists", "租户编码[{0}]已存在"),
USER_NOT_EXIST(10010, "user {0} not exists", "用户[{0}]不存在"),
ALERT_GROUP_NOT_EXIST(10011, "alarm group not found", "告警组不存在"),
ALERT_GROUP_EXIST(10012, "alarm group already exists", "告警组名称已存在"),
USER_NAME_PASSWD_ERROR(10013, "user name or password error", "用户名或密码错误"),
LOGIN_SESSION_FAILED(10014, "create session failed!", "创建session失败"),
DATASOURCE_EXIST(10015, "data source name already exists", "数据源名称已存在"),
DATASOURCE_CONNECT_FAILED(10016, "data source connection failed", "建立数据源连接失败"),
TENANT_NOT_EXIST(10017, "tenant not exists", "租户不存在"),
PROJECT_NOT_FOUNT(10018, "project {0} not found ", "项目[{0}]不存在"),
PROJECT_ALREADY_EXISTS(10019, "project {0} already exists", "项目名称[{0}]已存在"),
TASK_INSTANCE_NOT_EXISTS(10020, "task instance {0} does not exist", "任务实例[{0}]不存在"),
TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE(10021, "task instance {0} is not sub process instance", "任务实例[{0}]不是子流程实例"),
SCHEDULE_CRON_NOT_EXISTS(10022, "scheduler crontab {0} does not exist", "调度配置定时表达式[{0}]不存在"),
SCHEDULE_CRON_ONLINE_FORBID_UPDATE(10023, "online status does not allow update operations", "调度配置上线状态不允许修改"),
SCHEDULE_CRON_CHECK_FAILED(10024, "scheduler crontab expression validation failure: {0}", "调度配置定时表达式验证失败: {0}"),
MASTER_NOT_EXISTS(10025, "master does not exist", "无可用master节点"),
SCHEDULE_STATUS_UNKNOWN(10026, "unknown status: {0}", "未知状态: {0}"),
CREATE_ALERT_GROUP_ERROR(10027, "create alert group error", "创建告警组错误"),
QUERY_ALL_ALERTGROUP_ERROR(10028, "query all alertgroup error", "查询告警组错误"),
LIST_PAGING_ALERT_GROUP_ERROR(10029, "list paging alert group error", "分页查询告警组错误"),
UPDATE_ALERT_GROUP_ERROR(10030, "update alert group error", "更新告警组错误"),
DELETE_ALERT_GROUP_ERROR(10031, "delete alert group error", "删除告警组错误"),
ALERT_GROUP_GRANT_USER_ERROR(10032, "alert group grant user error", "告警组授权用户错误"),
CREATE_DATASOURCE_ERROR(10033, "create datasource error", "创建数据源错误"),
UPDATE_DATASOURCE_ERROR(10034, "update datasource error", "更新数据源错误"),
QUERY_DATASOURCE_ERROR(10035, "query datasource error", "查询数据源错误"),
CONNECT_DATASOURCE_FAILURE(10036, "connect datasource failure", "建立数据源连接失败"),
CONNECTION_TEST_FAILURE(10037, "connection test failure", "测试数据源连接失败"),
DELETE_DATA_SOURCE_FAILURE(10038, "delete data source failure", "删除数据源失败"),
VERIFY_DATASOURCE_NAME_FAILURE(10039, "verify datasource name failure", "验证数据源名称失败"),
UNAUTHORIZED_DATASOURCE(10040, "unauthorized datasource", "未经授权的数据源"),
AUTHORIZED_DATA_SOURCE(10041, "authorized data source", "授权数据源失败"),
LOGIN_SUCCESS(10042, "login success", "登录成功"),
USER_LOGIN_FAILURE(10043, "user login failure", "用户登录失败"),
LIST_WORKERS_ERROR(10044, "list workers error", "查询worker列表错误"),
LIST_MASTERS_ERROR(10045, "list masters error", "查询master列表错误"),
UPDATE_PROJECT_ERROR(10046, "update project error", "更新项目信息错误"),
QUERY_PROJECT_DETAILS_BY_ID_ERROR(10047, "query project details by id error", "查询项目详细信息错误"),
CREATE_PROJECT_ERROR(10048, "create project error", "创建项目错误"),
LOGIN_USER_QUERY_PROJECT_LIST_PAGING_ERROR(10049, "login user query project list paging error", "分页查询项目列表错误"),
DELETE_PROJECT_ERROR(10050, "delete project error", "删除项目错误"),
QUERY_UNAUTHORIZED_PROJECT_ERROR(10051, "query unauthorized project error", "查询未授权项目错误"),
QUERY_AUTHORIZED_PROJECT(10052, "query authorized project", "查询授权项目错误"),
QUERY_QUEUE_LIST_ERROR(10053, "query queue list error", "查询队列列表错误"),
CREATE_RESOURCE_ERROR(10054, "create resource error", "创建资源错误"),
UPDATE_RESOURCE_ERROR(10055, "update resource error", "更新资源错误"),
QUERY_RESOURCES_LIST_ERROR(10056, "query resources list error", "查询资源列表错误"),
QUERY_RESOURCES_LIST_PAGING(10057, "query resources list paging", "分页查询资源列表错误"),
DELETE_RESOURCE_ERROR(10058, "delete resource error", "删除资源错误"),
VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR(10059, "verify resource by name and type error", "资源名称或类型验证错误"),
VIEW_RESOURCE_FILE_ON_LINE_ERROR(10060, "view resource file online error", "查看资源文件错误"),
CREATE_RESOURCE_FILE_ON_LINE_ERROR(10061, "create resource file online error", "创建资源文件错误"),
RESOURCE_FILE_IS_EMPTY(10062, "resource file is empty", "资源文件内容不能为空"),
EDIT_RESOURCE_FILE_ON_LINE_ERROR(10063, "edit resource file online error", "更新资源文件错误"),
DOWNLOAD_RESOURCE_FILE_ERROR(10064, "download resource file error", "下载资源文件错误"),
CREATE_UDF_FUNCTION_ERROR(10065, "create udf function error", "创建UDF函数错误"),
VIEW_UDF_FUNCTION_ERROR(10066, "view udf function error", "查询UDF函数错误"),
UPDATE_UDF_FUNCTION_ERROR(10067, "update udf function error", "更新UDF函数错误"),
QUERY_UDF_FUNCTION_LIST_PAGING_ERROR(10068, "query udf function list paging error", "分页查询UDF函数列表错误"),
QUERY_DATASOURCE_BY_TYPE_ERROR(10069, "query datasource by type error", "查询数据源信息错误"),
VERIFY_UDF_FUNCTION_NAME_ERROR(10070, "verify udf function name error", "UDF函数名称验证错误"),
DELETE_UDF_FUNCTION_ERROR(10071, "delete udf function error", "删除UDF函数错误"),
AUTHORIZED_FILE_RESOURCE_ERROR(10072, "authorized file resource error", "授权资源文件错误"),
AUTHORIZE_RESOURCE_TREE(10073, "authorize resource tree display error", "授权资源目录树错误"),
UNAUTHORIZED_UDF_FUNCTION_ERROR(10074, "unauthorized udf function error", "查询未授权UDF函数错误"),
AUTHORIZED_UDF_FUNCTION_ERROR(10075, "authorized udf function error", "授权UDF函数错误"),
CREATE_SCHEDULE_ERROR(10076, "create schedule error", "创建调度配置错误"),
UPDATE_SCHEDULE_ERROR(10077, "update schedule error", "更新调度配置错误"),
PUBLISH_SCHEDULE_ONLINE_ERROR(10078, "publish schedule online error", "上线调度配置错误"),
OFFLINE_SCHEDULE_ERROR(10079, "offline schedule error", "下线调度配置错误"),
QUERY_SCHEDULE_LIST_PAGING_ERROR(10080, "query schedule list paging error", "分页查询调度配置列表错误"),
QUERY_SCHEDULE_LIST_ERROR(10081, "query schedule list error", "查询调度配置列表错误"),
QUERY_TASK_LIST_PAGING_ERROR(10082, "query task list paging error", "分页查询任务列表错误"),
QUERY_TASK_RECORD_LIST_PAGING_ERROR(10083, "query task record list paging error", "分页查询任务记录错误"),
CREATE_TENANT_ERROR(10084, "create tenant error", "创建租户错误"),
QUERY_TENANT_LIST_PAGING_ERROR(10085, "query tenant list paging error", "分页查询租户列表错误"),
QUERY_TENANT_LIST_ERROR(10086, "query tenant list error", "查询租户列表错误"),
UPDATE_TENANT_ERROR(10087, "update tenant error", "更新租户错误"),
DELETE_TENANT_BY_ID_ERROR(10088, "delete tenant by id error", "删除租户错误"),
VERIFY_TENANT_CODE_ERROR(10089, "verify tenant code error", "租户编码验证错误"),
CREATE_USER_ERROR(10090, "create user error", "创建用户错误"),
QUERY_USER_LIST_PAGING_ERROR(10091, "query user list paging error", "分页查询用户列表错误"),
UPDATE_USER_ERROR(10092, "update user error", "更新用户错误"),
DELETE_USER_BY_ID_ERROR(10093, "delete user by id error", "删除用户错误"),
GRANT_PROJECT_ERROR(10094, "grant project error", "授权项目错误"),
GRANT_RESOURCE_ERROR(10095, "grant resource error", "授权资源错误"),
GRANT_UDF_FUNCTION_ERROR(10096, "grant udf function error", "授权UDF函数错误"),
GRANT_DATASOURCE_ERROR(10097, "grant datasource error", "授权数据源错误"),
GET_USER_INFO_ERROR(10098, "get user info error", "获取用户信息错误"),
USER_LIST_ERROR(10099, "user list error", "查询用户列表错误"),
VERIFY_USERNAME_ERROR(10100, "verify username error", "用户名验证错误"),
UNAUTHORIZED_USER_ERROR(10101, "unauthorized user error", "查询未授权用户错误"),
AUTHORIZED_USER_ERROR(10102, "authorized user error", "查询授权用户错误"),
QUERY_TASK_INSTANCE_LOG_ERROR(10103, "view task instance log error", "查询任务实例日志错误"),
DOWNLOAD_TASK_INSTANCE_LOG_FILE_ERROR(10104, "download task instance log file error", "下载任务日志文件错误"),
CREATE_PROCESS_DEFINITION(10105, "create process definition", "创建工作流错误"),
VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR(10106, "verify process definition name unique error", "工作流名称已存在"),
UPDATE_PROCESS_DEFINITION_ERROR(10107, "update process definition error", "更新工作流定义错误"),
RELEASE_PROCESS_DEFINITION_ERROR(10108, "release process definition error", "上线工作流错误"),
QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR(10109, "query datail of process definition error", "查询工作流详细信息错误"),
QUERY_PROCESS_DEFINITION_LIST(10110, "query process definition list", "查询工作流列表错误"),
ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR(10111, "encapsulation treeview structure error", "查询工作流树形图数据错误"),
GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR(10112, "get tasks list by process definition id error", "查询工作流定义节点信息错误"),
QUERY_PROCESS_INSTANCE_LIST_PAGING_ERROR(10113, "query process instance list paging error", "分页查询工作流实例列表错误"),
QUERY_TASK_LIST_BY_PROCESS_INSTANCE_ID_ERROR(10114, "query task list by process instance id error", "查询任务实例列表错误"),
UPDATE_PROCESS_INSTANCE_ERROR(10115, "update process instance error", "更新工作流实例错误"),
QUERY_PROCESS_INSTANCE_BY_ID_ERROR(10116, "query process instance by id error", "查询工作流实例错误"),
DELETE_PROCESS_INSTANCE_BY_ID_ERROR(10117, "delete process instance by id error", "删除工作流实例错误"),
QUERY_SUB_PROCESS_INSTANCE_DETAIL_INFO_BY_TASK_ID_ERROR(10118, "query sub process instance detail info by task id error", "查询子流程任务实例错误"),
QUERY_PARENT_PROCESS_INSTANCE_DETAIL_INFO_BY_SUB_PROCESS_INSTANCE_ID_ERROR(10119, "query parent process instance detail info by sub process instance id error", "查询子流程该工作流实例错误"),
QUERY_PROCESS_INSTANCE_ALL_VARIABLES_ERROR(10120, "query process instance all variables error", "查询工作流自定义变量信息错误"),
ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR(10121, "encapsulation process instance gantt structure error", "查询工作流实例甘特图数据错误"),
QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR(10122, "query process definition list paging error", "分页查询工作流定义列表错误"),
SIGN_OUT_ERROR(10123, "sign out error", "退出错误"),
TENANT_CODE_HAS_ALREADY_EXISTS(10124, "tenant code has already exists", "租户编码已存在"),
IP_IS_EMPTY(10125, "ip is empty", "IP地址不能为空"),
SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE(10126, "schedule release is already {0}", "调度配置上线错误[{0}]"),
CREATE_QUEUE_ERROR(10127, "create queue error", "创建队列错误"),
QUEUE_NOT_EXIST(10128, "queue {0} not exists", "队列ID[{0}]不存在"),
QUEUE_VALUE_EXIST(10129, "queue value {0} already exists", "队列值[{0}]已存在"),
QUEUE_NAME_EXIST(10130, "queue name {0} already exists", "队列名称[{0}]已存在"),
UPDATE_QUEUE_ERROR(10131, "update queue error", "更新队列信息错误"),
NEED_NOT_UPDATE_QUEUE(10132, "no content changes, no updates are required", "数据未变更,不需要更新队列信息"),
VERIFY_QUEUE_ERROR(10133, "verify queue error", "验证队列信息错误"),
NAME_NULL(10134, "name must be not null", "名称不能为空"),
NAME_EXIST(10135, "name {0} already exists", "名称[{0}]已存在"),
SAVE_ERROR(10136, "save error", "保存错误"),
DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!", "请先删除全部工作流定义"),
BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10138, "batch delete process instance by ids {0} error", "批量删除工作流实例错误"),
PREVIEW_SCHEDULE_ERROR(10139, "preview schedule error", "预览调度配置错误"),
PARSE_TO_CRON_EXPRESSION_ERROR(10140, "parse cron to cron expression error", "解析调度表达式错误"),
SCHEDULE_START_TIME_END_TIME_SAME(10141, "The start time must not be the same as the end", "开始时间不能和结束时间一样"),
DELETE_TENANT_BY_ID_FAIL(10142, "delete tenant by id fail, for there are {0} process instances in executing using it", "删除租户失败,有[{0}]个运行中的工作流实例正在使用"),
DELETE_TENANT_BY_ID_FAIL_DEFINES(10143, "delete tenant by id fail, for there are {0} process definitions using it", "删除租户失败,有[{0}]个工作流定义正在使用"),
DELETE_TENANT_BY_ID_FAIL_USERS(10144, "delete tenant by id fail, for there are {0} users using it", "删除租户失败,有[{0}]个用户正在使用"),
DELETE_WORKER_GROUP_BY_ID_FAIL(10145, "delete worker group by id fail, for there are {0} process instances in executing using it", "删除Worker分组失败,有[{0}]个运行中的工作流实例正在使用"),
QUERY_WORKER_GROUP_FAIL(10146, "query worker group fail ", "查询worker分组失败"),
DELETE_WORKER_GROUP_FAIL(10147, "delete worker group fail ", "删除worker分组失败"),
USER_DISABLED(10148, "The current user is disabled", "当前用户已停用"),
COPY_PROCESS_DEFINITION_ERROR(10149, "copy process definition from {0} to {1} error : {2}", "从{0}复制工作流到{1}错误 : {2}"),
MOVE_PROCESS_DEFINITION_ERROR(10150, "move process definition from {0} to {1} error : {2}", "从{0}移动工作流到{1}错误 : {2}"),
SWITCH_PROCESS_DEFINITION_VERSION_ERROR(10151, "Switch process definition version error", "切换工作流版本出错"),
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR(10152
, "Switch process definition version error: not exists process definition, [process definition id {0}]", "切换工作流版本出错:工作流不存在,[工作流id {0}]"),
SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR(10153
, "Switch process definition version error: not exists process definition version, [process definition id {0}] [version number {1}]", "切换工作流版本出错:工作流版本信息不存在,[工作流id {0}] [版本号 {1}]"),
QUERY_PROCESS_DEFINITION_VERSIONS_ERROR(10154, "query process definition versions error", "查询工作流历史版本信息出错"),
QUERY_PROCESS_DEFINITION_VERSIONS_PAGE_NO_OR_PAGE_SIZE_LESS_THAN_1_ERROR(10155
, "query process definition versions error: [page number:{0}] < 1 or [page size:{1}] < 1", "查询工作流历史版本出错:[pageNo:{0}] < 1 或 [pageSize:{1}] < 1"),
DELETE_PROCESS_DEFINITION_VERSION_ERROR(10156, "delete process definition version error", "删除工作流历史版本出错"),
QUERY_USER_CREATED_PROJECT_ERROR(10157, "query user created project error error", "查询用户创建的项目错误"),
PROCESS_DEFINITION_IDS_IS_EMPTY(10158, "process definition ids is empty", "工作流IDS不能为空"),
BATCH_COPY_PROCESS_DEFINITION_ERROR(10159, "batch copy process definition error", "复制工作流错误"),
BATCH_MOVE_PROCESS_DEFINITION_ERROR(10160, "batch move process definition error", "移动工作流错误"),
QUERY_WORKFLOW_LINEAGE_ERROR(10161, "query workflow lineage error", "查询血缘失败"),
DELETE_PROCESS_DEFINITION_BY_ID_FAIL(10162, "delete process definition by id fail, for there are {0} process instances in executing using it", "删除工作流定义失败,有[{0}]个运行中的工作流实例正在使用"),
CHECK_TENANT_CODE_ERROR(10163, "Please enter the English tenant code", "请输入英文租户编码"),
CREATE_ALERT_PLUGIN_INSTANCE_ERROR(10164, "create alert plugin instance error", "创建告警组和告警组插件实例错误"),
UPDATE_ALERT_PLUGIN_INSTANCE_ERROR(10165, "update alert plugin instance error", "更新告警组和告警组插件实例错误"),
DELETE_ALERT_PLUGIN_INSTANCE_ERROR(10166, "delete alert plugin instance error", "删除告警组和告警组插件实例错误"),
GET_ALERT_PLUGIN_INSTANCE_ERROR(10167, "get alert plugin instance error", "获取告警组和告警组插件实例错误"),
UDF_FUNCTION_NOT_EXIST(20001, "UDF function not found", "UDF函数不存在"),
UDF_FUNCTION_EXISTS(20002, "UDF function already exists", "UDF函数已存在"),
RESOURCE_NOT_EXIST(20004, "resource not exist", "资源不存在"),
RESOURCE_EXIST(20005, "resource already exists", "资源已存在"),
RESOURCE_SUFFIX_NOT_SUPPORT_VIEW(20006, "resource suffix do not support online viewing", "资源文件后缀不支持查看"),
RESOURCE_SIZE_EXCEED_LIMIT(20007, "upload resource file size exceeds limit", "上传资源文件大小超过限制"),
RESOURCE_SUFFIX_FORBID_CHANGE(20008, "resource suffix not allowed to be modified", "资源文件后缀不支持修改"),
UDF_RESOURCE_SUFFIX_NOT_JAR(20009, "UDF resource suffix name must be jar", "UDF资源文件后缀名只支持[jar]"),
HDFS_COPY_FAIL(20010, "hdfs copy {0} -> {1} fail", "hdfs复制失败:[{0}] -> [{1}]"),
RESOURCE_FILE_EXIST(20011, "resource file {0} already exists in hdfs,please delete it or change name!", "资源文件[{0}]在hdfs中已存在,请删除或修改资源名"),
RESOURCE_FILE_NOT_EXIST(20012, "resource file {0} not exists in hdfs!", "资源文件[{0}]在hdfs中不存在"),
UDF_RESOURCE_IS_BOUND(20013, "udf resource file is bound by UDF functions:{0}", "udf函数绑定了资源文件[{0}]"),
RESOURCE_IS_USED(20014, "resource file is used by process definition", "资源文件被上线的流程定义使用了"),
PARENT_RESOURCE_NOT_EXIST(20015, "parent resource not exist", "父资源文件不存在"),
RESOURCE_NOT_EXIST_OR_NO_PERMISSION(20016, "resource not exist or no permission,please view the task node and remove error resource", "请检查任务节点并移除无权限或者已删除的资源"),
RESOURCE_IS_AUTHORIZED(20017, "resource is authorized to user {0},suffix not allowed to be modified", "资源文件已授权其他用户[{0}],后缀不允许修改"),
USER_NO_OPERATION_PERM(30001, "user has no operation privilege", "当前用户没有操作权限"),
USER_NO_OPERATION_PROJECT_PERM(30002, "user {0} is not has project {1} permission", "当前用户[{0}]没有[{1}]项目的操作权限"),
PROCESS_INSTANCE_NOT_EXIST(50001, "process instance {0} does not exist", "工作流实例[{0}]不存在"),
PROCESS_INSTANCE_EXIST(50002, "process instance {0} already exists", "工作流实例[{0}]已存在"),
PROCESS_DEFINE_NOT_EXIST(50003, "process definition {0} does not exist", "工作流定义[{0}]不存在"),
PROCESS_DEFINE_NOT_RELEASE(50004, "process definition {0} not on line", "工作流定义[{0}]不是上线状态"),
PROCESS_INSTANCE_ALREADY_CHANGED(50005, "the status of process instance {0} is already {1}", "工作流实例[{0}]的状态已经是[{1}]"),
PROCESS_INSTANCE_STATE_OPERATION_ERROR(50006, "the status of process instance {0} is {1},Cannot perform {2} operation", "工作流实例[{0}]的状态是[{1}],无法执行[{2}]操作"),
SUB_PROCESS_INSTANCE_NOT_EXIST(50007, "the task belong to process instance does not exist", "子工作流实例不存在"),
PROCESS_DEFINE_NOT_ALLOWED_EDIT(50008, "process definition {0} does not allow edit", "工作流定义[{0}]不允许修改"),
PROCESS_INSTANCE_EXECUTING_COMMAND(50009, "process instance {0} is executing the command, please wait ...", "工作流实例[{0}]正在执行命令,请稍等..."),
PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE(50010, "process instance {0} is not sub process instance", "工作流实例[{0}]不是子工作流实例"),
TASK_INSTANCE_STATE_COUNT_ERROR(50011, "task instance state count error", "查询各状态任务实例数错误"),
COUNT_PROCESS_INSTANCE_STATE_ERROR(50012, "count process instance state error", "查询各状态流程实例数错误"),
COUNT_PROCESS_DEFINITION_USER_ERROR(50013, "count process definition user error", "查询各用户流程定义数错误"),
START_PROCESS_INSTANCE_ERROR(50014, "start process instance error", "运行工作流实例错误"),
EXECUTE_PROCESS_INSTANCE_ERROR(50015, "execute process instance error", "操作工作流实例错误"),
CHECK_PROCESS_DEFINITION_ERROR(50016, "check process definition error", "检查工作流实例错误"),
QUERY_RECIPIENTS_AND_COPYERS_BY_PROCESS_DEFINITION_ERROR(50017, "query recipients and copyers by process definition error", "查询收件人和抄送人错误"),
DATA_IS_NOT_VALID(50017, "data {0} not valid", "数据[{0}]无效"),
DATA_IS_NULL(50018, "data {0} is null", "数据[{0}]不能为空"),
PROCESS_NODE_HAS_CYCLE(50019, "process node has cycle", "流程节点间存在循环依赖"),
PROCESS_NODE_S_PARAMETER_INVALID(50020, "process node {0} parameter invalid", "流程节点[{0}]参数无效"),
PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line", "工作流定义[{0}]已上线"),
DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022, "delete process definition by id error", "删除工作流定义错误"),
SCHEDULE_CRON_STATE_ONLINE(50023, "the status of schedule {0} is already on line", "调度配置[{0}]已上线"),
DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024, "delete schedule by id error", "删除调度配置错误"),
BATCH_DELETE_PROCESS_DEFINE_ERROR(50025, "batch delete process definition error", "批量删除工作流定义错误"),
BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026, "batch delete process definition by ids {0} error", "批量删除工作流定义[{0}]错误"),
TENANT_NOT_SUITABLE(50027, "there is not any tenant suitable, please choose a tenant available.", "没有合适的租户,请选择可用的租户"),
EXPORT_PROCESS_DEFINE_BY_ID_ERROR(50028, "export process definition by id error", "导出工作流定义错误"),
BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR(50028, "batch export process definition by ids error", "批量导出工作流定义错误"),
IMPORT_PROCESS_DEFINE_ERROR(50029, "import process definition error", "导入工作流定义错误"),
HDFS_NOT_STARTUP(60001, "hdfs not startup", "hdfs未启用"),
/**
* for monitor
*/
QUERY_DATABASE_STATE_ERROR(70001, "query database state error", "查询数据库状态错误"),
QUERY_ZOOKEEPER_STATE_ERROR(70002, "query zookeeper state error", "查询zookeeper状态错误"),
CREATE_ACCESS_TOKEN_ERROR(70010, "create access token error", "创建访问token错误"),
GENERATE_TOKEN_ERROR(70011, "generate token error", "生成token错误"),
QUERY_ACCESSTOKEN_LIST_PAGING_ERROR(70012, "query access token list paging error", "分页查询访问token列表错误"),
UPDATE_ACCESS_TOKEN_ERROR(70013, "update access token error", "更新访问token错误"),
DELETE_ACCESS_TOKEN_ERROR(70014, "delete access token error", "删除访问token错误"),
ACCESS_TOKEN_NOT_EXIST(70015, "access token not exist", "访问token不存在"),
COMMAND_STATE_COUNT_ERROR(80001, "task instance state count error", "查询各状态任务实例数错误"),
NEGTIVE_SIZE_NUMBER_ERROR(80002, "query size number error", "查询size错误"),
START_TIME_BIGGER_THAN_END_TIME_ERROR(80003, "start time bigger than end time error", "开始时间在结束时间之后错误"),
QUEUE_COUNT_ERROR(90001, "queue count error", "查询队列数据错误"),
KERBEROS_STARTUP_STATE(100001, "get kerberos startup state error", "获取kerberos启动状态错误"),
;
private final int code;
private final String enMsg;
private final String zhMsg;
private Status(int code, String enMsg, String zhMsg) {
this.code = code;
this.enMsg = enMsg;
this.zhMsg = zhMsg;
}
public int getCode() {
return this.code;
}
public String getMsg() {
if (Locale.SIMPLIFIED_CHINESE.getLanguage().equals(LocaleContextHolder.getLocale().getLanguage())) {
return this.zhMsg;
} else {
return this.enMsg;
}
}
} |
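A short usage sketch of the bilingual enum above: `getMsg()` resolves the language against Spring's `LocaleContextHolder`, and the `{0}`-style placeholders are ordinary `MessageFormat` templates. The demo class itself is illustrative only.

```java
import java.text.MessageFormat;
import java.util.Locale;

import org.apache.dolphinscheduler.api.enums.Status;
import org.springframework.context.i18n.LocaleContextHolder;

public class StatusMessageDemo {

    public static void main(String[] args) {
        // getMsg() compares the held locale's language against zh_CN
        LocaleContextHolder.setLocale(Locale.SIMPLIFIED_CHINESE);
        // prints: 用户[tom]不存在
        System.out.println(MessageFormat.format(Status.USER_NOT_EXIST.getMsg(), "tom"));

        LocaleContextHolder.setLocale(Locale.ENGLISH);
        // prints: user tom not exists
        System.out.println(MessageFormat.format(Status.USER_NOT_EXIST.getMsg(), "tom"));
    }
}
```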
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/UiPluginService.java |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/UiPluginServiceImpl.java |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/UiPluginServiceTest.java |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/PluginType.java |
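`PluginType.java` is likewise still empty in this snapshot. A hypothetical shape, keyed to the `"alert"` type string that `AlertPluginManager.installPlugin` writes into `PluginDefine`, could be:

```java
// hypothetical sketch; the real enum added by PR #4311 may differ
public enum PluginType {

    ALERT(1, "alert", true);

    private final int code;
    private final String desc;
    // whether the plugin ships UI params for form-create to render
    private final boolean hasUi;

    PluginType(int code, String desc, boolean hasUi) {
        this.code = code;
        this.desc = desc;
        this.hasUi = hasUi;
    }

    public int getCode() {
        return code;
    }

    public String getDesc() {
        return desc;
    }

    public boolean isHasUi() {
        return hasUi;
    }
}
```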
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao.mapper;
import org.apache.dolphinscheduler.dao.entity.PluginDefine;
import org.apache.ibatis.annotations.Param;
import java.util.List;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
public interface PluginDefineMapper extends BaseMapper<PluginDefine> {
/**
* query all plugin define
*
* @return PluginDefine list
*/
List<PluginDefine> queryAllPluginDefineList();
/**
* query by plugin type
*
* @param pluginType pluginType
* @return PluginDefine list
*/
List<PluginDefine> queryByPluginType(@Param("pluginType") String pluginType);
/**
 * query plugin defines by plugin name and plugin type
 *
 * @param pluginName plugin name
 * @param pluginType plugin type
 * @return PluginDefine list
 */
List<PluginDefine> queryByNameAndType(@Param("pluginName") String pluginName, @Param("pluginType") String pluginType);
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
*Some plugins (such as alert plugin) need to provide UI interfaces to users.
* We use from-creat to dynamically generate UI interfaces. Related parameters are mainly provided by pluginParams.
* From-create can generate dynamic ui based on this parameter. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/PluginDefineMapper.xml | <?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.PluginDefineMapper">
<select id="queryAllPluginDefineList" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine">
select *
from t_ds_plugin_define
where 1=1
</select>
<select id="queryByPluginType" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine">
select *
from t_ds_plugin_define
where plugin_type = #{pluginType}
</select>
<select id="queryByNameAndType" resultType="org.apache.dolphinscheduler.dao.entity.PluginDefine">
select *
from t_ds_plugin_define
where plugin_name = #{pluginName} and plugin_type = #{pluginType}
</select>
</mapper> |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,310 | [Feature][Alert-SPI] Plug-ins containing UI components provide display pages |
* Some plugins (such as the alert plugin) need to provide UI interfaces to users.
* We use form-create to dynamically generate UI interfaces. The related parameters are mainly provided by pluginParams.
* form-create can generate a dynamic UI based on these parameters. | https://github.com/apache/dolphinscheduler/issues/4310 | https://github.com/apache/dolphinscheduler/pull/4311 | 0f31152aeedf56e3a16883ded012952dbcacfc6b | 17d44216a4795d9b5a93a35b1457a2ead1f2b2c8 | "2020-12-25T09:01:31Z" | java | "2020-12-28T03:25:00Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.2-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing, making the scheduling system out of the box for data
processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.5.RELEASE</spring.version>
<spring.boot.version>2.1.3.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.2.3</quartz.version>
<jackson.version>2.9.8</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.22</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.6</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>24.1-jre</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<presto.jdbc.version>0.238.1</presto.jdbc.version>
<jsp-2.1.version>6.1.14</jsp-2.1.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<apache.rat.version>0.13</apache.rat.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
<rpm-maven-plugin.version>2.2.0</rpm-maven-plugin.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<guava-retry.version>2.0.0</guava-retry.version>
<dep.airlift.version>0.184</dep.airlift.version>
<dep.packaging.version>${dep.airlift.version}</dep.packaging.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert-plugin</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-spi</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<artifactId>com.sun.jersey</artifactId>
<groupId>jersey-json</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>${presto.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<!-- for api module -->
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>${jsp-2.1.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
<dependency>
<groupId>org.sonatype.aether</groupId>
<artifactId>aether-api</artifactId>
<version>1.13.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>io.airlift.resolver</groupId>
<artifactId>resolver</artifactId>
<version>1.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
<version>6.2.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
<version>1.6.2</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-maven-plugin</artifactId>
<version>1.0.0</version>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>ca.vanzyl.maven.plugins</groupId>
<artifactId>provisio-maven-plugin</artifactId>
<version>1.0.4</version>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<version>${rpm-maven-plugin.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-maven-plugin</artifactId>
<extensions>true</extensions>
<!--<configuration>-->
<!--<allowedProvidedDependencies>-->
<!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>-->
<!--</allowedProvidedDependencies>-->
<!--</configuration>-->
</plugin>
<plugin>
<groupId>ca.vanzyl.maven.plugins</groupId>
<artifactId>provisio-maven-plugin</artifactId>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<includes>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
<include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/PasswordAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<include>**/api/service/DataSourceServiceTest.java</include>
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessDefinitionVersionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/service/WorkFlowLineageServiceTest.java</include>
<include>**/api/controller/WorkFlowLineageControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/process/ProcessBuilderForWin32Test.java</include>
<include>**/common/utils/process/ProcessEnvironmentForWin32Test.java</include>
<include>**/common/utils/process/ProcessImplForWin32Test.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/VarPoolUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/utils/KerberosHttpClientTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/HadoopUtils.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/common/enums/ExecutionStatusTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/dao/entity/TaskInstanceTest.java</include>
<include>**/dao/entity/UdfFuncTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<include>**/remote/NettyRemotingClientTest.java</include>
<include>**/remote/NettyUtilTest.java</include>
<include>**/remote/ResponseFutureTest.java</include>
<include>**/remote/command/alert/AlertSendRequestCommandTest.java</include>
<include>**/remote/command/alert/AlertSendResponseCommandTest.java</include>
<include>**/server/log/LoggerServerTest.java</include>
<include>**/server/entity/SQLTaskExecutionContextTest.java</include>
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<include>**/server/master/register/MasterRegistryTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include>
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<include>**/server/master/ConditionsTaskTest.java</include>
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<include>**/server/master/SubProcessTaskTest.java</include>
<include>**/server/register/ZookeeperNodeManagerTest.java</include>
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<include>**/server/utils/HostTest.java</include>
<!--<include>**/server/utils/FlinkArgsUtilsTest.java</include>-->
<include>**/server/utils/LogUtilsTest.java</include>
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>
<include>**/server/worker/registry/WorkerRegistryTest.java</include>
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/sql/SqlTaskTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/task/TaskManagerTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/server/worker/runner/TaskExecuteThreadTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/process/ProcessServiceTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/zk/CuratorZookeeperClientTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/service/alert/AlertClientServiceTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserAlertGroupMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include>
<include>**/dao/mapper/PluginDefineTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/datasource/HiveDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/email/EmailAlertChannelTest.java</include>
<include>**/plugin/alert/email/ExcelUtilsTest.java</include>
<include>**/plugin/alert/email/MailUtilsTest.java</include>
<include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include>
<include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include>
<include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/wechat/WeChatSenderTest.java</include>
<include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/script/ProcessUtilsTest.java</include>
<include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/script/ScriptSenderTest.java</include>
<include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/http/HttpAlertChannelTest.java</include>
<include>**/plugin/alert/http/HttpAlertPluginTest.java</include>
<include>**/plugin/alert/http/HttpSenderTest.java</include>
<include>**/spi/params/PluginParamsTransferTest.java</include>
<!--<include>**/alert/plugin/EmailAlertPluginTest.java</include>-->
<include>**/alert/plugin/AlertPluginManagerTest.java</include>
<include>**/alert/plugin/DolphinPluginLoaderTest.java</include>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/processor/AlertRequestProcessorTest.java</include>
<include>**/alert/runner/AlertSenderTest.java</include>
<include>**/alert/AlertServerTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache.rat.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<addDefaultLicenseMatchers>false</addDefaultLicenseMatchers>
<licenses>
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL20</licenseFamilyCategory>
<licenseFamilyName>Apache License, 2.0</licenseFamilyName>
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF)</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License, 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>**/alert/plugin/DolphinPluginClassLoader.java</exclude>
<exclude>**/alert/plugin/DolphinPluginDiscovery.java</exclude>
<exclude>**/alert/plugin/DolphinPluginLoader.java</exclude>
<exclude>**/node_modules/**</exclude>
<exclude>**/node/**</exclude>
<exclude>**/dist/**</exclude>
<exclude>**/licenses/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/sql/soft_version</exclude>
<exclude>**/common/utils/ScriptRunner.java</exclude>
<exclude>**/*.json</exclude>
<!-- document files -->
<exclude>**/*.md</exclude>
<exclude>**/*.MD</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/docs/**</exclude>
<exclude>**/*.babelrc</exclude>
<exclude>**/*.eslintrc</exclude>
<exclude>**/.mvn/jvm.config</exclude>
<exclude>**/.mvn/wrapper/**</exclude>
<exclude>**/*.iml</exclude>
</excludes>
<consoleOutput>true</consoleOutput>
</configuration>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-alert-plugin</module>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-spi</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,314 | [Question] dolphinscheduler cannot get the YARN address | The task is submitted and runs successfully, but dolphinscheduler reports the task status as failed.
After checking the log, the YARN address that was obtained is null, even though the hosts file and common.properties are configured correctly and the YARN nodes are reachable.
hosts
```properties
192.169.199.105 hdnn01
192.168.199.106 hdnn02
192.168.199.107 hdnn03
```
common.properties
```properties
# if resourcemanager HA enable, please type the HA ips ; if resourcemanager is single, make this value empty
yarn.resourcemanager.ha.rm.ids=hdnn01,hdnn02,hdnn03
# if resourcemanager HA enable or not use resourcemanager, please keep the default value; If resourcemanager is single, you only need to replace ds1 to actual resourcemanager hostname.
yarn.application.status.address=http://yarnIp1:8088/ws/v1/cluster/apps/%s
```
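A rough sketch of the host substitution an HA-aware status URL needs; the names here are illustrative, not the real HadoopUtils logic. If resolving the active ResourceManager fails and the lookup result stringifies to "null", the formatted address becomes exactly the http://null:8088 URL seen in the worker log below.

```java
public class YarnAppUrlSketch {

    /** Substitute the active ResourceManager host into the status-address template. */
    static String buildAppUrl(String template, String activeRmHost, String appId) {
        // template example: http://ds1:8088/ws/v1/cluster/apps/%s ("ds1" is the host slot)
        return String.format(template.replace("ds1", activeRmHost), appId);
    }

    public static void main(String[] args) {
        String template = "http://ds1:8088/ws/v1/cluster/apps/%s";
        // normal case: the active RM was resolved
        System.out.println(buildAppUrl(template, "hdnn01", "application_1608880603127_0014"));
        // failure case: a null lookup result stringifies to "null" -> http://null:8088/...
        System.out.println(buildAppUrl(template, String.valueOf((Object) null), "application_1608880603127_0014"));
    }
}
```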
dolphinscheduler-worker.log
```
[INFO] 2020-12-25 18:17:30.759 - [taskAppId=TASK-3-4-17]:[193] - process start, process id is: 107032
[INFO] 2020-12-25 18:18:52.502 - [taskAppId=TASK-3-4-17]:[202] - process has exited, execute path:/tmp/dolphinscheduler/exec/process/1/3/4/17, processId:107032 ,exitStatusCode:0
[INFO] 2020-12-25 18:18:52.504 - [taskAppId=TASK-3-4-17]:[419] - find app id: application_1608880603127_0014
[INFO] 2020-12-25 18:18:52.576 org.apache.dolphinscheduler.common.utils.HadoopUtils:[144] - get property:fs.defaultFS -> hdfs://hdcluster, from core-site.xml hdfs-site.xml
[INFO] 2020-12-25 18:19:09.220 org.apache.dolphinscheduler.common.utils.HadoopUtils:[206] - application url : http://null:8088/ws/v1/cluster/apps/%s
[ERROR] 2020-12-25 18:19:24.259 org.apache.dolphinscheduler.common.utils.HttpUtils:[73] - null: Name or service not known
java.net.UnknownHostException: null: Name or service not known
at java.net.Inet6AddressImpl.lookupAllHostAddr(Native Method)
at java.net.InetAddress$2.lookupAllHostAddr(InetAddress.java:928)
at java.net.InetAddress.getAddressesFromNameService(InetAddress.java:1323)
at java.net.InetAddress.getAllByName0(InetAddress.java:1276)
at java.net.InetAddress.getAllByName(InetAddress.java:1192)
at java.net.InetAddress.getAllByName(InetAddress.java:1126)
at org.apache.http.impl.conn.SystemDefaultDnsResolver.resolve(SystemDefaultDnsResolver.java:45)
at org.apache.http.impl.conn.DefaultHttpClientConnectionOperator.connect(DefaultHttpClientConnectionOperator.java:111)
at org.apache.http.impl.conn.PoolingHttpClientConnectionManager.connect(PoolingHttpClientConnectionManager.java:353)
at org.apache.http.impl.execchain.MainClientExec.establishRoute(MainClientExec.java:380)
at org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:236)
at org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:184)
at org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:88)
at org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:110)
at org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:184)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:82)
at org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:107)
at org.apache.dolphinscheduler.common.utils.HttpUtils.get(HttpUtils.java:60)
at org.apache.dolphinscheduler.common.utils.HadoopUtils.getApplicationStatus(HadoopUtils.java:412)
at org.apache.dolphinscheduler.server.worker.task.AbstractCommandExecutor.isSuccessOfYarnState(AbstractCommandExecutor.java:378)
at org.apache.dolphinscheduler.server.worker.task.AbstractCommandExecutor.run(AbstractCommandExecutor.java:218)
at org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask.handle(AbstractYarnTask.java:57)
at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:129)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
[ERROR] 2020-12-25 18:19:24.260 - [taskAppId=TASK-3-4-17]:[392] - yarn applications: [application_1608880603127_0014] status failed
java.lang.NullPointerException: null
at org.apache.dolphinscheduler.common.utils.HadoopUtils.getApplicationStatus(HadoopUtils.java:415)
at org.apache.dolphinscheduler.server.worker.task.AbstractCommandExecutor.isSuccessOfYarnState(AbstractCommandExecutor.java:378)
at org.apache.dolphinscheduler.server.worker.task.AbstractCommandExecutor.run(AbstractCommandExecutor.java:218)
at org.apache.dolphinscheduler.server.worker.task.AbstractYarnTask.handle(AbstractYarnTask.java:57)
at org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread.run(TaskExecuteThread.java:129)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
[INFO] 2020-12-25 18:19:24.261 org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread:[137] - task instance id : 17,task final status : FAILURE
``` | https://github.com/apache/dolphinscheduler/issues/4314 | https://github.com/apache/dolphinscheduler/pull/4344 | b9e8dbed2f9dd29f51b30411f80bc92a0ca04c06 | ccd06fec16bae7da97bc4f61c45d4b471dccebc4 | "2020-12-26T06:57:41Z" | java | "2020-12-30T03:03:36Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/HadoopUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import static org.apache.dolphinscheduler.common.Constants.RESOURCE_UPLOAD_PATH;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.ResUploadType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.client.cli.RMAdminCLI;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.security.PrivilegedExceptionAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
/**
* hadoop utils
* single instance
*/
public class HadoopUtils implements Closeable {
private static final Logger logger = LoggerFactory.getLogger(HadoopUtils.class);
private static String hdfsUser = PropertyUtils.getString(Constants.HDFS_ROOT_USER);
public static final String resourceUploadPath = PropertyUtils.getString(RESOURCE_UPLOAD_PATH, "/dolphinscheduler");
public static final String rmHaIds = PropertyUtils.getString(Constants.YARN_RESOURCEMANAGER_HA_RM_IDS);
public static final String appAddress = PropertyUtils.getString(Constants.YARN_APPLICATION_STATUS_ADDRESS);
public static final String jobHistoryAddress = PropertyUtils.getString(Constants.YARN_JOB_HISTORY_STATUS_ADDRESS);
private static final String HADOOP_UTILS_KEY = "HADOOP_UTILS_KEY";
private static final LoadingCache<String, HadoopUtils> cache = CacheBuilder
.newBuilder()
.expireAfterWrite(PropertyUtils.getInt(Constants.KERBEROS_EXPIRE_TIME, 2), TimeUnit.HOURS)
.build(new CacheLoader<String, HadoopUtils>() {
@Override
public HadoopUtils load(String key) throws Exception {
return new HadoopUtils();
}
});
private static volatile boolean yarnEnabled = false;
private Configuration configuration;
private FileSystem fs;
private HadoopUtils() {
init();
initHdfsPath();
}
public static HadoopUtils getInstance() {
return cache.getUnchecked(HADOOP_UTILS_KEY);
}
/**
* init dolphinscheduler root path in hdfs
*/
private void initHdfsPath() {
Path path = new Path(resourceUploadPath);
try {
if (!fs.exists(path)) {
fs.mkdirs(path);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
/**
* init hadoop configuration
*/
private void init() {
try {
configuration = new Configuration();
String resourceStorageType = PropertyUtils.getUpperCaseString(Constants.RESOURCE_STORAGE_TYPE);
ResUploadType resUploadType = ResUploadType.valueOf(resourceStorageType);
if (resUploadType == ResUploadType.HDFS) {
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
System.setProperty(Constants.JAVA_SECURITY_KRB5_CONF,
PropertyUtils.getString(Constants.JAVA_SECURITY_KRB5_CONF_PATH));
configuration.set(Constants.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
hdfsUser = "";
UserGroupInformation.setConfiguration(configuration);
UserGroupInformation.loginUserFromKeytab(PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_USERNAME),
PropertyUtils.getString(Constants.LOGIN_USER_KEY_TAB_PATH));
}
String defaultFS = configuration.get(Constants.FS_DEFAULTFS);
//first get key from core-site.xml hdfs-site.xml ,if null ,then try to get from properties file
// the default is the local file system
if (defaultFS.startsWith("file")) {
String defaultFSProp = PropertyUtils.getString(Constants.FS_DEFAULTFS);
if (StringUtils.isNotBlank(defaultFSProp)) {
Map<String, String> fsRelatedProps = PropertyUtils.getPrefixedProperties("fs.");
configuration.set(Constants.FS_DEFAULTFS, defaultFSProp);
fsRelatedProps.forEach((key, value) -> configuration.set(key, value));
} else {
logger.error("property:{} can not to be empty, please set!", Constants.FS_DEFAULTFS);
throw new RuntimeException(
String.format("property: %s can not to be empty, please set!", Constants.FS_DEFAULTFS)
);
}
} else {
logger.info("get property:{} -> {}, from core-site.xml hdfs-site.xml ", Constants.FS_DEFAULTFS, defaultFS);
}
if (fs == null) {
if (StringUtils.isNotEmpty(hdfsUser)) {
UserGroupInformation ugi = UserGroupInformation.createRemoteUser(hdfsUser);
ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
@Override
public Boolean run() throws Exception {
fs = FileSystem.get(configuration);
return true;
}
});
} else {
logger.warn("hdfs.root.user is not set value!");
fs = FileSystem.get(configuration);
}
}
} else if (resUploadType == ResUploadType.S3) {
System.setProperty(Constants.AWS_S3_V4, Constants.STRING_TRUE);
configuration.set(Constants.FS_DEFAULTFS, PropertyUtils.getString(Constants.FS_DEFAULTFS));
configuration.set(Constants.FS_S3A_ENDPOINT, PropertyUtils.getString(Constants.FS_S3A_ENDPOINT));
configuration.set(Constants.FS_S3A_ACCESS_KEY, PropertyUtils.getString(Constants.FS_S3A_ACCESS_KEY));
configuration.set(Constants.FS_S3A_SECRET_KEY, PropertyUtils.getString(Constants.FS_S3A_SECRET_KEY));
fs = FileSystem.get(configuration);
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
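// Illustrative common.properties values that init() above expects; the key
// names are assumptions mirroring the Constants referenced in the code:
//   resource.storage.type=HDFS
//   fs.defaultFS=hdfs://hdcluster
//   hadoop.security.authentication.startup.state=false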
/**
* @return Configuration
*/
public Configuration getConfiguration() {
return configuration;
}
/**
* get application url
*
* @param applicationId application id
 * @return url of application
 * @throws Exception if the resolved application url is blank
 */
public String getApplicationUrl(String applicationId) throws Exception {
/**
 * if rmHaIds contains xx, it means the resourcemanager is not used;
 * otherwise:
 * if rmHaIds is empty, a single resourcemanager is enabled;
 * if rmHaIds is not empty, resourcemanager HA is enabled
 */
String appUrl = "";
if (StringUtils.isEmpty(rmHaIds)) {
//single resourcemanager enabled
appUrl = appAddress;
yarnEnabled = true;
} else {
//resourcemanager HA enabled
appUrl = getAppAddress(appAddress, rmHaIds);
yarnEnabled = true;
logger.info("application url : {}", appUrl);
}
if (StringUtils.isBlank(appUrl)) {
throw new Exception("application url is blank");
}
return String.format(appUrl, applicationId);
}
/**
 * get job history url by application id
 *
 * @param applicationId application id
 * @return url of job history
 */
public String getJobHistoryUrl(String applicationId) {
//eg:application_1587475402360_712719 -> job_1587475402360_712719
String jobId = applicationId.replace("application", "job");
return String.format(jobHistoryAddress, jobId);
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @return byte[] byte array
* @throws IOException errors
*/
public byte[] catFile(String hdfsFilePath) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return new byte[0];
}
try (FSDataInputStream fsDataInputStream = fs.open(new Path(hdfsFilePath))) {
return IOUtils.toByteArray(fsDataInputStream);
}
}
/**
* cat file on hdfs
*
* @param hdfsFilePath hdfs file path
* @param skipLineNums skip line numbers
* @param limit read how many lines
* @return content of file
* @throws IOException errors
*/
public List<String> catFile(String hdfsFilePath, int skipLineNums, int limit) throws IOException {
if (StringUtils.isBlank(hdfsFilePath)) {
logger.error("hdfs file path:{} is blank", hdfsFilePath);
return Collections.emptyList();
}
try (FSDataInputStream in = fs.open(new Path(hdfsFilePath))) {
BufferedReader br = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
Stream<String> stream = br.lines().skip(skipLineNums).limit(limit);
return stream.collect(Collectors.toList());
}
}
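// Usage sketch (illustrative, hypothetical path): page through a log on HDFS
// without loading the whole file, e.g. the first 100 lines:
//   List<String> page = HadoopUtils.getInstance()
//           .catFile("/dolphinscheduler/logs/task.log", 0, 100);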
/**
* make the given file and all non-existent parents into
* directories. Has the semantics of Unix 'mkdir -p'.
* Existence of the directory hierarchy is not an error.
*
* @param hdfsPath path to create
* @return mkdir result
* @throws IOException errors
*/
public boolean mkdir(String hdfsPath) throws IOException {
return fs.mkdirs(new Path(hdfsPath));
}
/**
* copy files between FileSystems
*
* @param srcPath source hdfs path
* @param dstPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copy(String srcPath, String dstPath, boolean deleteSource, boolean overwrite) throws IOException {
return FileUtil.copy(fs, new Path(srcPath), fs, new Path(dstPath), deleteSource, overwrite, fs.getConf());
}
/**
* the src file is on the local disk. Add it to FS at
* the given dst name.
*
* @param srcFile local file
* @param dstHdfsPath destination hdfs path
* @param deleteSource whether to delete the src
* @param overwrite whether to overwrite an existing file
* @return if success or not
* @throws IOException errors
*/
public boolean copyLocalToHdfs(String srcFile, String dstHdfsPath, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcFile);
Path dstPath = new Path(dstHdfsPath);
fs.copyFromLocalFile(deleteSource, overwrite, srcPath, dstPath);
return true;
}
/**
* copy hdfs file to local
*
* @param srcHdfsFilePath source hdfs file path
* @param dstFile destination file
* @param deleteSource delete source
* @param overwrite overwrite
* @return result of copy hdfs file to local
* @throws IOException errors
*/
public boolean copyHdfsToLocal(String srcHdfsFilePath, String dstFile, boolean deleteSource, boolean overwrite) throws IOException {
Path srcPath = new Path(srcHdfsFilePath);
File dstPath = new File(dstFile);
if (dstPath.exists()) {
if (dstPath.isFile()) {
if (overwrite) {
Files.delete(dstPath.toPath());
}
} else {
logger.error("destination file must be a file");
}
}
if (!dstPath.getParentFile().exists()) {
dstPath.getParentFile().mkdirs();
}
return FileUtil.copy(fs, srcPath, dstPath, deleteSource, fs.getConf());
}
/**
* delete a file
*
* @param hdfsFilePath the path to delete.
* @param recursive if path is a directory and set to
* true, the directory is deleted else throws an exception. In
* case of a file the recursive can be set to either true or false.
* @return true if delete is successful else false.
* @throws IOException errors
*/
public boolean delete(String hdfsFilePath, boolean recursive) throws IOException {
return fs.delete(new Path(hdfsFilePath), recursive);
}
/**
* check if exists
*
* @param hdfsFilePath source file path
* @return result of exists or not
* @throws IOException errors
*/
public boolean exists(String hdfsFilePath) throws IOException {
return fs.exists(new Path(hdfsFilePath));
}
/**
* Gets a list of files in the directory
*
* @param filePath file path
* @return {@link FileStatus} file status
* @throws Exception errors
*/
public FileStatus[] listFileStatus(String filePath) throws Exception {
try {
return fs.listStatus(new Path(filePath));
} catch (IOException e) {
logger.error("Get file list exception", e);
throw new Exception("Get file list exception", e);
}
}
/**
* Renames Path src to Path dst. Can take place on local fs
* or remote DFS.
*
* @param src path to be renamed
* @param dst new path after rename
* @return true if rename is successful
* @throws IOException on failure
*/
public boolean rename(String src, String dst) throws IOException {
return fs.rename(new Path(src), new Path(dst));
}
/**
* hadoop resourcemanager enabled or not
*
* @return result
*/
public boolean isYarnEnabled() {
return yarnEnabled;
}
/**
* get the state of an application
*
* @param applicationId application id
* @return the return may be null or there may be other parse exceptions
*/
public ExecutionStatus getApplicationStatus(String applicationId) throws Exception {
if (StringUtils.isEmpty(applicationId)) {
return null;
}
String result = Constants.FAILED;
String applicationUrl = getApplicationUrl(applicationId);
logger.info("applicationUrl={}", applicationUrl);
String responseContent;
if (PropertyUtils.getBoolean(Constants.HADOOP_SECURITY_AUTHENTICATION_STARTUP_STATE, false)) {
responseContent = KerberosHttpClient.get(applicationUrl);
} else {
responseContent = HttpUtils.get(applicationUrl);
}
if (responseContent != null) {
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
if (!jsonObject.has("app")) {
return ExecutionStatus.FAILURE;
}
result = jsonObject.path("app").path("finalStatus").asText();
} else {
//may be in job history
String jobHistoryUrl = getJobHistoryUrl(applicationId);
logger.info("jobHistoryUrl={}", jobHistoryUrl);
responseContent = HttpUtils.get(jobHistoryUrl);
if (null != responseContent) {
ObjectNode jsonObject = JSONUtils.parseObject(responseContent);
if (!jsonObject.has("job")) {
return ExecutionStatus.FAILURE;
}
result = jsonObject.path("job").path("state").asText();
} else {
return ExecutionStatus.FAILURE;
}
}
switch (result) {
case Constants.ACCEPTED:
return ExecutionStatus.SUBMITTED_SUCCESS;
case Constants.SUCCEEDED:
return ExecutionStatus.SUCCESS;
case Constants.NEW:
case Constants.NEW_SAVING:
case Constants.SUBMITTED:
case Constants.FAILED:
return ExecutionStatus.FAILURE;
case Constants.KILLED:
return ExecutionStatus.KILL;
case Constants.RUNNING:
default:
return ExecutionStatus.RUNNING_EXECUTION;
}
}
/**
* get data hdfs path
*
* @return data hdfs path
*/
public static String getHdfsDataBasePath() {
if ("/".equals(resourceUploadPath)) {
// if basepath is configured to /, the generated url may be //default/resources (with extra leading /)
return "";
} else {
return resourceUploadPath;
}
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @param resourceType resource type
* @return hdfs resource dir
*/
public static String getHdfsDir(ResourceType resourceType, String tenantCode) {
String hdfsDir = "";
if (resourceType.equals(ResourceType.FILE)) {
hdfsDir = getHdfsResDir(tenantCode);
} else if (resourceType.equals(ResourceType.UDF)) {
hdfsDir = getHdfsUdfDir(tenantCode);
}
return hdfsDir;
}
/**
* hdfs resource dir
*
* @param tenantCode tenant code
* @return hdfs resource dir
*/
public static String getHdfsResDir(String tenantCode) {
return String.format("%s/resources", getHdfsTenantDir(tenantCode));
}
/**
* hdfs user dir
*
* @param tenantCode tenant code
* @param userId user id
* @return hdfs resource dir
*/
public static String getHdfsUserDir(String tenantCode, int userId) {
return String.format("%s/home/%d", getHdfsTenantDir(tenantCode), userId);
}
/**
* hdfs udf dir
*
* @param tenantCode tenant code
* @return get udf dir on hdfs
*/
public static String getHdfsUdfDir(String tenantCode) {
return String.format("%s/udfs", getHdfsTenantDir(tenantCode));
}
/**
* get hdfs file name
*
* @param resourceType resource type
* @param tenantCode tenant code
* @param fileName file name
* @return hdfs file name
*/
public static String getHdfsFileName(ResourceType resourceType, String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsDir(resourceType, tenantCode), fileName);
}
/**
* get absolute path and name for resource file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for file on hdfs
*/
public static String getHdfsResourceFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsResDir(tenantCode), fileName);
}
/**
* get absolute path and name for udf file on hdfs
*
* @param tenantCode tenant code
* @param fileName file name
* @return get absolute path and name for udf file on hdfs
*/
public static String getHdfsUdfFileName(String tenantCode, String fileName) {
if (fileName.startsWith("/")) {
fileName = fileName.replaceFirst("/", "");
}
return String.format("%s/%s", getHdfsUdfDir(tenantCode), fileName);
}
/**
* @param tenantCode tenant code
* @return file directory of tenants on hdfs
*/
public static String getHdfsTenantDir(String tenantCode) {
return String.format("%s/%s", getHdfsDataBasePath(), tenantCode);
}
/**
* getAppAddress
*
* @param appAddress app address
* @param rmHa resource manager ha
* @return app address
*/
public static String getAppAddress(String appAddress, String rmHa) {
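        // illustrative example: appAddress "http://ds1:8088/ws/v1/cluster/apps/%s"
        // with rmHa "ds1,ds2" is rewritten to "http://<active-rm>:8088/ws/v1/cluster/apps/%s"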
//get active ResourceManager
        String activeRM = YarnHAAdminUtils.getActiveRMName(rmHa);
String[] split1 = appAddress.split(Constants.DOUBLE_SLASH);
if (split1.length != 2) {
return null;
}
String start = split1[0] + Constants.DOUBLE_SLASH;
String[] split2 = split1[1].split(Constants.COLON);
if (split2.length != 2) {
return null;
}
String end = Constants.COLON + split2[1];
return start + activeRM + end;
}
@Override
public void close() throws IOException {
if (fs != null) {
try {
fs.close();
} catch (IOException e) {
logger.error("Close HadoopUtils instance failed", e);
throw new IOException("Close HadoopUtils instance failed", e);
}
}
}
/**
* yarn ha admin utils
*/
private static final class YarnHAAdminUtils extends RMAdminCLI {
/**
* get active resourcemanager
*
         * @param rmIds comma separated resource manager ids
         * @return the id of the active resource manager, or null if none reports active
         */
        public static String getActiveRMName(String rmIds) {
String[] rmIdArr = rmIds.split(Constants.COMMA);
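            // assumes an HA pair: only the first two resource manager ids are ever probed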
int activeResourceManagerPort = PropertyUtils.getInt(Constants.HADOOP_RESOURCE_MANAGER_HTTPADDRESS_PORT, 8088);
String yarnUrl = "http://%s:" + activeResourceManagerPort + "/ws/v1/cluster/info";
String state = null;
try {
/**
* send http get request to rm1
*/
state = getRMState(String.format(yarnUrl, rmIdArr[0]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[0];
} else if (Constants.HADOOP_RM_STATE_STANDBY.equals(state)) {
state = getRMState(String.format(yarnUrl, rmIdArr[1]));
if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
return rmIdArr[1];
}
} else {
return null;
}
} catch (Exception e) {
                // rm1 was unreachable; probe rm2 and return it if it reports active
                state = getRMState(String.format(yarnUrl, rmIdArr[1]));
                if (Constants.HADOOP_RM_STATE_ACTIVE.equals(state)) {
                    return rmIdArr[1];
}
}
return null;
}
/**
* get ResourceManager state
*
         * @param url cluster info endpoint of one resource manager
         * @return the reported haState, or null if the request or parsing fails
*/
public static String getRMState(String url) {
String retStr = HttpUtils.get(url);
if (StringUtils.isEmpty(retStr)) {
return null;
}
//to json
ObjectNode jsonObject = JSONUtils.parseObject(retStr);
//get ResourceManager state
if (!jsonObject.has("clusterInfo")) {
return null;
}
return jsonObject.get("clusterInfo").path("haState").asText();
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,338 | [Bug][UI] Invalid date problem in IE | **For better global communication, please describe it in English. If the English description is not clear, you can append a description in Chinese (Mandarin), thanks!**
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Go to 'Task Instance', 'Warning group manage' or 'Token manage' in IE
2. See 'Invalid Date' error
**Expected behavior**
Bug fixed.
**Screenshots**
If applicable, add screenshots to help explain your problem.
![image](https://user-images.githubusercontent.com/4902714/103278240-02132380-4a06-11eb-8c3c-db16e9558201.png)
![image](https://user-images.githubusercontent.com/4902714/103278253-06d7d780-4a06-11eb-8903-52d25bc5f6dc.png)
![image](https://user-images.githubusercontent.com/4902714/103278260-09d2c800-4a06-11eb-9145-185b8081518d.png)
**Which version of Dolphin Scheduler:**
-[all versions]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please describe your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/4338 | https://github.com/apache/dolphinscheduler/pull/4339 | 2e44f4c553b761236a78daf993e150b08172ed37 | 45b097f37870d2a7b8e51fbba3e6d70b49ed2341 | "2020-12-29T10:45:50Z" | java | "2020-12-30T04:00:56Z" | dolphinscheduler-ui/src/js/module/filter/filter.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import dayjs from 'dayjs'
/**
* Formatting time
*/
const formatDate = (value, fmt) => {
fmt = fmt || 'YYYY-MM-DD HH:mm:ss'
if (value === null) {
return '-'
} else {
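    // note: handing 'YYYY-MM-DD HH:mm:ss' strings straight to dayjs relies on native
    // Date parsing, which old IE rejects (the 'Invalid Date' in issue #4338); an
    // IE-safe workaround is to normalize the string (e.g. '-' -> '/') before parsing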
return dayjs(value).format(fmt)
}
}
/**
* filter null
*/
const filterNull = (value) => {
if (value === null || value === '') {
return '-'
} else {
return value
}
}
export {
formatDate, filterNull
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,359 | [Bug][UI] the page does not refresh after clicking the delete function |
**Describe the bug**
The page does not refresh after clicking the delete function.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Click 'Workflow Definition'
2. Click the timing management button of a workflow
3. Click the delete button of a timing
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4359 | https://github.com/apache/dolphinscheduler/pull/4360 | 46443a6f3b4228556562606e29b2026c21f1d252 | 66141d58f6e19285a1a5d4d7cbd508b0f585a26c | "2021-01-02T05:10:18Z" | java | "2021-01-04T01:30:04Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/timing/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div>
<div class="conditions-box">
<!--<m-conditions @on-conditions="_onConditions"></m-conditions>-->
</div>
<div class="list-model" v-if="!isLoading">
<template v-if="list.length">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="processDefinitionName" :label="$t('Process Name')"></el-table-column>
<el-table-column :label="$t('Start Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.startTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('End Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.endTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column prop="crontab" :label="$t('crontab')"></el-table-column>
<el-table-column prop="failureStrategy" :label="$t('Failure Strategy')"></el-table-column>
<el-table-column :label="$t('State')">
<template slot-scope="scope">
<span>{{_rtReleaseState(scope.row.releaseState)}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="120">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" :disabled="scope.row.releaseState === 'ONLINE'" @click="_editTiming(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('online')" placement="top" v-if="scope.row.releaseState === 'OFFLINE'">
<span><el-button type="warning" size="mini" icon="el-icon-upload2" @click="_online(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('offline')" placement="top" v-if="scope.row.releaseState === 'ONLINE'">
<span><el-button type="danger" size="mini" icon="el-icon-download" @click="_offline(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<div class="page-box">
<el-pagination
background
@current-change="_page"
@size-change="_pageSize"
:page-size="pageSize"
:current-page.sync="pageNo"
:page-sizes="[10, 30, 50]"
layout="sizes, prev, pager, next, jumper"
:total="total">
</el-pagination>
</div>
</template>
<template v-if="!list.length">
<m-no-data></m-no-data>
</template>
</div>
<m-spin :is-spin="isLoading"></m-spin>
<el-dialog
:title="$t('Set parameters before timing')"
:visible.sync="timingDialog"
width="auto">
<m-timing :timingData="timingData" @onUpdateTiming="onUpdateTiming" @closeTiming="closeTiming"></m-timing>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import mSpin from '@/module/components/spin/spin'
import mTiming from '../../pages/list/_source/timing'
import mNoData from '@/module/components/noData/noData'
import { publishStatus } from '@/conf/home/pages/dag/_source/config'
export default {
name: 'list',
data () {
return {
isLoading: false,
total: null,
pageNo: 1,
pageSize: 10,
list: [],
timingDialog: false,
timingData: {
item: {},
receiversD: [],
receiversCcD: []
}
}
},
props: {
},
methods: {
...mapActions('dag', ['getScheduleList', 'scheduleOffline', 'scheduleOnline', 'getReceiver', 'deleteTiming']),
/**
* delete
*/
    _delete (item, i) {
      this.deleteTiming({
        scheduleId: item.id
      }).then(res => {
        this.$message.success(res.msg)
        // the template now uses el-popconfirm, so there are no poptip refs to close;
        // refresh the timing list in place so the deleted row disappears immediately
        this._getScheduleList('false')
      }).catch(e => {
        this.$message.error(e.msg || '')
      })
    },
/**
* return state
*/
_rtReleaseState (code) {
return _.filter(publishStatus, v => v.code === code)[0].desc
},
/**
* page
*/
_page (val) {
this.pageNo = val
this._getScheduleList()
},
_pageSize (val) {
this.pageSize = val
this._getScheduleList()
},
/**
* Inquire list
*/
_getScheduleList (flag) {
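      // any truthy flag (callers pass the string 'false') skips the loading spinner,
      // so only the initial load of the page shows it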
this.isLoading = !flag
this.getScheduleList({
processDefinitionId: this.$route.params.id,
searchVal: '',
pageNo: this.pageNo,
pageSize: this.pageSize
}).then(res => {
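        // clear the array and reassign on the next tick to force the table to re-render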
this.list = []
setTimeout(() => {
this.list = res.data.totalList
})
this.total = res.data.total
this.isLoading = false
}).catch(e => {
this.isLoading = false
})
},
/**
* search
*/
_onConditions (o) {
this.searchVal = o.searchVal
this.pageNo = 1
this._getScheduleList('false')
},
/**
* online
*/
_online (item) {
this.pageNo = 1
this.scheduleOnline({
id: item.id
}).then(res => {
this.$message.success(res.msg)
this._getScheduleList('false')
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* offline
*/
_offline (item) {
this.pageNo = 1
this.scheduleOffline({
id: item.id
}).then(res => {
this.$message.success(res.msg)
this._getScheduleList('false')
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* get email
*/
_getReceiver (id) {
return new Promise((resolve, reject) => {
this.getReceiver({ processDefinitionId: id }).then(res => {
resolve({
receivers: res.receivers && res.receivers.split(',') || [],
receiversCc: res.receiversCc && res.receiversCc.split(',') || []
})
})
})
},
/**
* timing
*/
_editTiming (item) {
this._getReceiver(item.processDefinitionId).then(res => {
this.timingData.item = item
this.timingData.receiversD = res.receivers
this.timingData.receiversCcD = res.receiversCc
this.timingDialog = true
})
},
onUpdateTiming () {
this.pageNo = 1
this._getScheduleList('false')
this.timingDialog = false
},
closeTiming () {
this.timingDialog = false
}
},
watch: {},
created () {
this._getScheduleList()
},
mounted () {},
components: { mSpin, mNoData, mTiming }
}
</script>
<style lang="scss" rel="stylesheet/scss">
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,326 | [Improvement][UI] Add description field to data source list | **Describe the question**
[UI] Add description field to data source list
**Which version of DolphinScheduler:**
-[dev]
Anyone interested can claim this PR. | https://github.com/apache/dolphinscheduler/issues/4326 | https://github.com/apache/dolphinscheduler/pull/4362 | 66141d58f6e19285a1a5d4d7cbd508b0f585a26c | 0b71c77b2d2ff85afe2b440e53773fe2c428bed3 | "2020-12-28T11:19:08Z" | java | "2021-01-04T02:04:58Z" | dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="name" :label="$t('Datasource Name')"></el-table-column>
<el-table-column prop="type" :label="$t('Datasource Type')"></el-table-column>
<el-table-column :label="$t('Datasource Parameter')">
<template slot-scope="scope">
<div>
<m-tooltips-JSON :JSON="JSON.parse(scope.row.connectionParams)" :id="scope.row.id">
<span slot="reference">
<el-button size="small" type="text">{{$t('Click to view')}}</el-button>
</span>
</m-tooltips-JSON>
</div>
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import { findComponentDownward } from '@/module/util/'
import mTooltipsJSON from '@/module/components/tooltipsJSON/tooltipsJSON'
export default {
name: 'datasource-list',
data () {
return {
// list
list: []
}
},
props: {
// External incoming data
datasourcesList: Array,
// current page number
pageNo: Number,
    // number of items per page
pageSize: Number
},
methods: {
...mapActions('datasource', ['deleteDatasource']),
/**
* Delete current line
*/
_delete (item, i) {
this.deleteDatasource({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* edit
*/
_edit (item) {
findComponentDownward(this.$root, 'datasource-indexP')._create(item)
}
},
watch: {
/**
* Monitor external data changes
*/
datasourcesList (a) {
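      // clear and reassign on the next tick so the table re-renders with the new data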
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.datasourcesList
},
mounted () {
},
components: { mTooltipsJSON }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server, logs show as follows:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explaination on stackoverflow][1]. It shows that JDK 9 has a breaking change here.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | dolphinscheduler-api/pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.4-SNAPSHOT</version>
</parent>
<artifactId>dolphinscheduler-api</artifactId>
<name>${project.artifactId}</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
</dependency>
<!--springboot-->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-tomcat</artifactId>
</exclusion>
<exclusion>
<artifactId>log4j-to-slf4j</artifactId>
<groupId>org.apache.logging.log4j</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- use jetty -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jetty</artifactId>
<exclusions>
<exclusion>
<groupId>org.eclipse.jetty.websocket</groupId>
<artifactId>javax-websocket-server-impl</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty.websocket</groupId>
<artifactId>websocket-server</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
<exclusions>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<exclusions>
<exclusion>
<groupId>com.mchange</groupId>
<artifactId>c3p0</artifactId>
</exclusion>
<exclusion>
<groupId>com.mchange</groupId>
<artifactId>mchange-commons-java</artifactId>
</exclusion>
<exclusion>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP-java6</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-models</artifactId>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<exclusions>
<exclusion>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<exclusions>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<exclusions>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
</dependency>
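        <!-- the Jasper JSP runtime pulled in via the jsp-2.1 artifact below is the
             likely source of the URLClassLoader cast reported in issue #3233 on JDK 9+ -->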
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<exclusions>
<exclusion>
<groupId>org.mortbay.jetty</groupId>
<artifactId>servlet-api-2.5</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- just for test -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-autoconfigure</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server, logs show as follows:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explanation on Stack Overflow][1]. It shows that JDK 9 introduced a breaking change here.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | dolphinscheduler-dist/release-docs/LICENSE | Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================
Apache DolphinScheduler Subcomponents:
The Apache DolphinScheduler project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for the these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Apache 2.0 licenses
========================================================================
The following components are provided under the Apache License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
        ant 1.6.5: https://mvnrepository.com/artifact/ant/ant/1.6.5, Apache 2.0
apacheds-i18n 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-i18n/2.0.0-M15, Apache 2.0
apacheds-kerberos-codec 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-kerberos-codec/2.0.0-M15, Apache 2.0
apache-el 8.5.35.1: https://mvnrepository.com/artifact/org.mortbay.jasper/apache-el/8.5.35.1, Apache 2.0
api-asn1-api 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-asn1-api/1.0.0-M20, Apache 2.0
api-util 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-util/1.0.0-M20, Apache 2.0
audience-annotations 0.5.0: https://mvnrepository.com/artifact/org.apache.yetus/audience-annotations/0.5.0, Apache 2.0
avro 1.7.4: https://github.com/apache/avro, Apache 2.0
aws-sdk-java 1.7.4: https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk/1.7.4, Apache 2.0
bonecp 0.8.0.RELEASE: https://github.com/wwadge/bonecp, Apache 2.0
byte-buddy 1.9.10: https://mvnrepository.com/artifact/net.bytebuddy/byte-buddy/1.9.10, Apache 2.0
classmate 1.4.0: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.4.0, Apache 2.0
clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0
        commons-beanutils 1.7.0: https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.7.0, Apache 2.0
commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0
commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0
commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0
commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0
commons-compress 1.4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.4.1, Apache 2.0
commons-configuration 1.10: https://mvnrepository.com/artifact/commons-configuration/commons-configuration/1.10, Apache 2.0
        commons-daemon 1.0.13: https://mvnrepository.com/artifact/commons-daemon/commons-daemon/1.0.13, Apache 2.0
commons-dbcp 1.4: https://github.com/apache/commons-dbcp, Apache 2.0
commons-email 1.5: https://github.com/apache/commons-email, Apache 2.0
commons-httpclient 3.0.1: https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient/3.0.1, Apache 2.0
commons-io 2.4: https://github.com/apache/commons-io, Apache 2.0
commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0
commons-logging 1.1.1: https://github.com/apache/commons-logging, Apache 2.0
commons-math3 3.1.1: https://github.com/apache/commons-math, Apache 2.0
commons-net 3.1: https://github.com/apache/commons-net, Apache 2.0
commons-pool 1.6: https://github.com/apache/commons-pool, Apache 2.0
cron-utils 5.0.5: https://mvnrepository.com/artifact/com.cronutils/cron-utils/5.0.5, Apache 2.0
curator-client 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-client/4.3.0, Apache 2.0
curator-framework 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-framework/4.3.0, Apache 2.0
curator-recipes 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-recipes/4.3.0, Apache 2.0
datanucleus-api-jdo 4.2.1: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-api-jdo/4.2.1, Apache 2.0
datanucleus-core 4.1.6: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-core/4.1.6, Apache 2.0
datanucleus-rdbms 4.1.7: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-rdbms/4.1.7, Apache 2.0
derby 10.14.2.0: https://github.com/apache/derby, Apache 2.0
druid 1.1.14: https://mvnrepository.com/artifact/com.alibaba/druid/1.1.14, Apache 2.0
gson 2.8.5: https://github.com/google/gson, Apache 2.0
guava 20.0: https://mvnrepository.com/artifact/com.google.guava/guava/20.0, Apache 2.0
guice 3.0: https://mvnrepository.com/artifact/com.google.inject/guice/3.0, Apache 2.0
guice-servlet 3.0: https://mvnrepository.com/artifact/com.google.inject.extensions/guice-servlet/3.0, Apache 2.0
hadoop-annotations 2.7.3:https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-annotations/2.7.3, Apache 2.0
hadoop-auth 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-auth/2.7.3, Apache 2.0
hadoop-aws 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/2.7.3, Apache 2.0
hadoop-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client/2.7.3, Apache 2.0
hadoop-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common/2.7.3, Apache 2.0
hadoop-hdfs 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs/2.7.3, Apache 2.0
hadoop-mapreduce-client-app 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-app/2.7.3, Apache 2.0
hadoop-mapreduce-client-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-common/2.7.3, Apache 2.0
hadoop-mapreduce-client-core 2.7.3: https://mvnrepository.com/artifact/io.hops/hadoop-mapreduce-client-core/2.7.3, Apache 2.0
hadoop-mapreduce-client-jobclient 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-jobclient/2.7.3, Apache 2.0
hadoop-mapreduce-client-shuffle 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-shuffle/2.7.3, Apache 2.0
hadoop-yarn-api 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-api/2.7.3, Apache 2.0
hadoop-yarn-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client/2.7.3, Apache 2.0
hadoop-yarn-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common/2.7.3, Apache 2.0
hadoop-yarn-server-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-server-common/2.7.3, Apache 2.0
hibernate-validator 6.0.14.Final: https://github.com/hibernate/hibernate-validator, Apache 2.0
HikariCP 3.2.0: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/3.2.0, Apache 2.0
hive-common 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-common/2.1.0, Apache 2.0
hive-jdbc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc/2.1.0, Apache 2.0
hive-metastore 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-metastore/2.1.0, Apache 2.0
hive-orc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-orc/2.1.0, Apache 2.0
hive-serde 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-serde/2.1.0, Apache 2.0
hive-service 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service/2.1.0, Apache 2.0
hive-service-rpc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service-rpc/2.1.0, Apache 2.0
hive-storage-api 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-storage-api/2.1.0, Apache 2.0
htrace-core 3.1.0-incubating: https://mvnrepository.com/artifact/org.apache.htrace/htrace-core/3.1.0-incubating, Apache 2.0
httpclient 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient/4.4.1, Apache 2.0
httpcore 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore/4.4.1, Apache 2.0
httpmime 4.5.7: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime/4.5.7, Apache 2.0
jackson-annotations 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.9.10, Apache 2.0
jackson-core 2.9.10: https://github.com/FasterXML/jackson-core, Apache 2.0
jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0
jackson-databind 2.9.10: https://github.com/FasterXML/jackson-databind, Apache 2.0
jackson-datatype-jdk8 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.10, Apache 2.0
jackson-datatype-jsr310 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.10, Apache 2.0
jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1
jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0
jackson-module-parameter-names 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.10, Apache 2.0
jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1
javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0
javax.jdo 3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0
java-xmlbuilder 0.4: https://mvnrepository.com/artifact/com.jamesmurty.utils/java-xmlbuilder/0.4, Apache 2.0
jboss-logging 3.3.2.Final: https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.3.2.Final, Apache 2.0
jdo-api 3.0.1: https://mvnrepository.com/artifact/javax.jdo/jdo-api/3.0.1, Apache 2.0
jets3t 0.9.0: https://mvnrepository.com/artifact/net.java.dev.jets3t/jets3t/0.9.0, Apache 2.0
jettison 1.1: https://github.com/jettison-json/jettison, Apache 2.0
jetty 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty/6.1.26, Apache 2.0 and EPL 1.0
jetty-continuation 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-http 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-io 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-security 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-server 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-servlet 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-servlets 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-util 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty-util/6.1.26, Apache 2.0 and EPL 1.0
jetty-util 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-webapp 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-xml 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jna 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna/4.5.2, Apache 2.0 and LGPL 2.1
jna-platform 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/4.5.2, Apache 2.0 and LGPL 2.1
joda-time 2.10.1: https://github.com/JodaOrg/joda-time, Apache 2.0
jpam 1.1: https://mvnrepository.com/artifact/net.sf.jpam/jpam/1.1, Apache 2.0
jsqlparser 2.1: https://github.com/JSQLParser/JSqlParser, Apache 2.0 or LGPL 2.1
jsp-api-2.1 6.1.14: https://mvnrepository.com/artifact/org.mortbay.jetty/jsp-api-2.1/6.1.14, Apache 2.0
jsr305 3.0.0: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0
libfb303 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libfb303/0.9.3, Apache 2.0
libthrift 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libthrift/0.9.3, Apache 2.0
log4j-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api/2.11.2, Apache 2.0
log4j-core 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core/2.11.2, Apache 2.0
log4j 1.2.17: https://mvnrepository.com/artifact/log4j/log4j/1.2.17, Apache 2.0
log4j-1.2-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.11.2, Apache 2.0
lz4 1.3.0: https://mvnrepository.com/artifact/net.jpountz.lz4/lz4/1.3.0, Apache 2.0
mapstruct 1.2.0.Final: https://github.com/mapstruct/mapstruct, Apache 2.0
mybatis 3.5.2: https://mvnrepository.com/artifact/org.mybatis/mybatis/3.5.2, Apache 2.0
mybatis-plus 3.2.0: https://github.com/baomidou/mybatis-plus, Apache 2.0
mybatis-plus-annotation 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-annotation/3.2.0, Apache 2.0
mybatis-plus-boot-starter 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-boot-starter/3.2.0, Apache 2.0
mybatis-plus-core 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-core/3.2.0, Apache 2.0
mybatis-plus-extension 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-extension/3.2.0, Apache 2.0
mybatis-spring 2.0.2: https://mvnrepository.com/artifact/org.mybatis/mybatis-spring/2.0.2, Apache 2.0
netty 3.6.2.Final: https://github.com/netty/netty, Apache 2.0
netty-all 4.1.33.Final: https://github.com/netty/netty/blob/netty-4.1.33.Final/LICENSE.txt, Apache 2.0
opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0
parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0
poi 3.17: https://mvnrepository.com/artifact/org.apache.poi/poi/3.17, Apache 2.0
quartz 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.0, Apache 2.0
quartz-jobs 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.0, Apache 2.0
snakeyaml 1.23: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.23, Apache 2.0
snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0
snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0
spring-aop 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.18.RELEASE, Apache 2.0
spring-beans 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.18.RELEASE, Apache 2.0
spring-boot 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.17.RELEASE, Apache 2.0
spring-boot-autoconfigure 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.17.RELEASE, Apache 2.0
spring-boot-starter 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-aop 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-jdbc 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-jetty 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-json 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-logging 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-web 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.17.RELEASE, Apache 2.0
spring-context 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.18.RELEASE, Apache 2.0
spring-core 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core/5.1.18.RELEASE, Apache 2.0
spring-expression 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression/5.1.18.RELEASE, Apache 2.0
springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0
springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0
springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0
springfox-spring-web 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spring-web/2.9.2, Apache 2.0
springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0
springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0
springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0
spring-jcl 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.18.RELEASE, Apache 2.0
spring-jdbc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.18.RELEASE, Apache 2.0
spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0
spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0
spring-tx 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.18.RELEASE, Apache 2.0
spring-web 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.18.RELEASE, Apache 2.0
spring-webmvc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.18.RELEASE, Apache 2.0
swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0
swagger-bootstrap-ui 1.9.3: https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0
swagger-models 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.20, Apache 2.0
tephra-api 0.6.0: https://mvnrepository.com/artifact/co.cask.tephra/tephra-api/0.6.0, Apache 2.0
validation-api 2.0.1.Final: https://mvnrepository.com/artifact/javax.validation/validation-api/2.0.1.Final, Apache 2.0
xercesImpl 2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0
xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C
zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0
presto-jdbc 0.238.1: https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1, Apache 2.0
========================================================================
BSD licenses
========================================================================
The following components are provided under a BSD license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
asm 3.1: https://github.com/jdf/javalin/tree/master/lib/asm-3.1, BSD
javolution 5.5.1: https://mvnrepository.com/artifact/javolution/javolution/5.5.1, BSD
jline 0.9.94: https://github.com/jline/jline3, BSD
jsch 0.1.42: https://mvnrepository.com/artifact/com.jcraft/jsch/0.1.42, BSD
leveldbjni-all 1.8: https://github.com/fusesource/leveldbjni, BSD-3-Clause
postgresql 42.1.4: https://mvnrepository.com/artifact/org.postgresql/postgresql/42.1.4, BSD 2-clause
protobuf-java 2.5.0: https://mvnrepository.com/artifact/com.google.protobuf/protobuf-java/2.5.0, BSD 2-clause
paranamer 2.3: https://mvnrepository.com/artifact/com.thoughtworks.paranamer/paranamer/2.3, BSD
threetenbp 1.3.6: https://mvnrepository.com/artifact/org.threeten/threetenbp/1.3.6, BSD 3-clause
xmlenc 0.52: https://mvnrepository.com/artifact/xmlenc/xmlenc/0.52, BSD
hamcrest-core 1.3: https://mvnrepository.com/artifact/org.hamcrest/hamcrest-core/1.3, BSD 2-Clause
========================================================================
CDDL licenses
========================================================================
The following components are provided under the CDDL License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
activation 1.1: https://mvnrepository.com/artifact/javax.activation/activation/1.1 CDDL 1.0
javax.activation-api 1.2.0: https://mvnrepository.com/artifact/javax.activation/javax.activation-api/1.2.0, CDDL and LGPL 2.0
javax.annotation-api 1.3.2: https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api/1.3.2, CDDL + GPLv2
javax.mail 1.6.2: https://mvnrepository.com/artifact/com.sun.mail/javax.mail/1.6.2, CDDL/GPLv2
javax.servlet-api 3.1.0: https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api/3.1.0, CDDL + GPLv2
jaxb-api 2.3.1: https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api/2.3.1, CDDL 1.1
jaxb-impl 2.2.3-1: https://mvnrepository.com/artifact/com.sun.xml.bind/jaxb-impl/2.2.3-1, CDDL and GPL 1.1
jersey-client 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-client/1.9, CDDL 1.1 and GPL 1.1
jersey-core 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-core/1.9, CDDL 1.1 and GPL 1.1
jersey-guice 1.9: https://mvnrepository.com/artifact/com.sun.jersey.contribs/jersey-guice/1.9, CDDL 1.1 and GPL 1.1
jersey-json 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-json/1.9, CDDL 1.1 and GPL 1.1
jersey-server 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-server/1.9, CDDL 1.1 and GPL 1.1
jsp-2.1 6.1.14: https://mvnrepository.com/artifact/org.mortbay.jetty/jsp-2.1/6.1.14, CDDL 1.0
jta 1.1: https://mvnrepository.com/artifact/javax.transaction/jta/1.1, CDDL 1.0
transaction-api 1.1: https://mvnrepository.com/artifact/javax.transaction/transaction-api/1.1, CDDL 1.0
========================================================================
EPL licenses
========================================================================
The following components are provided under the EPL License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
aspectjweaver 1.9.2: https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.2, EPL 1.0
core 3.1.1: https://mvnrepository.com/artifact/org.eclipse.jdt/core/3.1.1, EPL 1.0
logback-classic 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-classic/1.2.3, EPL 1.0 and LGPL 2.1
logback-core 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-core/1.2.3, EPL 1.0 and LGPL 2.1
oshi-core 3.5.0: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/3.5.0, EPL 1.0
junit 4.12: https://mvnrepository.com/artifact/junit/junit/4.12, EPL 1.0
h2 1.4.200: https://github.com/h2database/h2database/blob/master/LICENSE.txt, MPL 2.0 or EPL 1.0
========================================================================
MIT licenses
========================================================================
The following components are provided under an MIT license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
jul-to-slf4j 1.7.25: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.25, MIT
mssql-jdbc 6.1.0.jre8: https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc/6.1.0.jre8, MIT
slf4j-api 1.7.5: https://mvnrepository.com/artifact/org.slf4j/slf4j-api/1.7.5, MIT
========================================================================
MPL 1.1 licenses
========================================================================
The following components are provided under an MPL 1.1 license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
jamon-runtime 2.3.1: https://mvnrepository.com/artifact/org.jamon/jamon-runtime/2.3.1, MPL-1.1
========================================================================
Public Domain licenses
========================================================================
xz 1.0: https://mvnrepository.com/artifact/org.tukaani/xz/1.0, Public Domain
aopalliance 1.0: https://mvnrepository.com/artifact/aopalliance/aopalliance/1.0, Public Domain
========================================================================
UI related licenses
========================================================================
The following components are used in the UI. See project link for details.
The text of each license is also included at licenses/ui-licenses/LICENSE-[project].txt.
========================================
MIT licenses
========================================
ans-UI 1.1.7: https://github.com/analysys/ans-ui MIT
axios 0.16.2: https://github.com/axios/axios MIT
bootstrap 3.3.7: https://github.com/twbs/bootstrap MIT
canvg 1.5.1: https://github.com/canvg/canvg MIT
clipboard 2.0.1: https://github.com/zenorocha/clipboard.js MIT
codemirror 5.43.0: https://github.com/codemirror/CodeMirror MIT
dayjs 1.7.8: https://github.com/iamkun/dayjs MIT
element-ui 2.13.2: https://github.com/ElemeFE/element MIT
html2canvas 0.5.0-beta4: https://github.com/niklasvh/html2canvas MIT
jquery 3.3.1: https://github.com/jquery/jquery MIT
jquery-ui 1.12.1: https://github.com/jquery/jquery-ui MIT
js-cookie 2.2.1: https://github.com/js-cookie/js-cookie MIT
jsplumb 2.8.6: https://github.com/jsplumb/jsplumb MIT and GPLv2
lodash 4.17.11: https://github.com/lodash/lodash MIT
vue-treeselect 0.4.0: https://github.com/riophae/vue-treeselect MIT
vue 2.5.17: https://github.com/vuejs/vue MIT
vue-router 2.7.0: https://github.com/vuejs/vue-router MIT
vuex 3.0.0: https://github.com/vuejs/vuex MIT
vuex-router-sync 4.1.2: https://github.com/vuejs/vuex-router-sync MIT
dagre 0.8.5: https://github.com/dagrejs/dagre MIT
========================================
Apache 2.0 licenses
========================================
echarts 4.1.0: https://github.com/apache/incubator-echarts Apache-2.0
========================================
BSD licenses
========================================
d3 3.5.17: https://github.com/d3/d3 BSD-3-Clause
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server; the logs show the following:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explanation on Stack Overflow][1]. It shows that JDK 9 introduced a breaking change here.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
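To make the failure mode concrete, here is a minimal standalone repro I sketched (the class name is hypothetical; this is my own illustration, not DolphinScheduler or Jasper code). The cast below is the same kind of cast the stack trace points at in `JspRuntimeContext.java:174`: it compiles on any JDK, succeeds on JDK 8, and throws exactly this `ClassCastException` on JDK 9+, where the application class loader no longer extends `URLClassLoader`.

```java
import java.net.URLClassLoader;
import java.util.Arrays;

// Hypothetical demo class: runs fine on JDK 8, throws ClassCastException on JDK 9+.
public class ClassLoaderCastDemo {
    public static void main(String[] args) {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        // JDK 8 prints sun.misc.Launcher$AppClassLoader (a URLClassLoader subclass);
        // JDK 9+ prints jdk.internal.loader.ClassLoaders$AppClassLoader (which is not).
        System.out.println("Loader: " + cl.getClass().getName());
        URLClassLoader ucl = (URLClassLoader) cl; // fails on JDK 9+
        System.out.println(Arrays.toString(ucl.getURLs()));
    }
}
```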
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | dolphinscheduler-dist/release-docs/licenses/LICENSE-ant-1.6.5.txt | /*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "[]"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright [yyyy] [name of copyright owner]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server; the logs show the following:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explanation on Stack Overflow][1]. It shows that JDK 9 introduced a breaking change here.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
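For context, the pattern JDK 9-compatible libraries adopted is to guard the cast instead of assuming it. Below is a rough sketch of that workaround (my own illustration with a hypothetical helper name, not the actual fix applied in DolphinScheduler or Jasper):

```java
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: resolve class-path URLs without assuming URLClassLoader.
public class ClasspathUrls {
    public static URL[] resolve() throws MalformedURLException {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        if (cl instanceof URLClassLoader) {
            return ((URLClassLoader) cl).getURLs(); // JDK 8 fast path
        }
        // JDK 9+: the app class loader is no longer a URLClassLoader,
        // so fall back to the java.class.path system property.
        List<URL> urls = new ArrayList<>();
        for (String entry : System.getProperty("java.class.path").split(File.pathSeparator)) {
            urls.add(Paths.get(entry).toUri().toURL());
        }
        return urls.toArray(new URL[0]);
    }

    public static void main(String[] args) throws MalformedURLException {
        for (URL url : resolve()) {
            System.out.println(url);
        }
    }
}
```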
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | dolphinscheduler-dist/release-docs/licenses/LICENSE-core-3.1.1.txt | Eclipse Public License - v 1.0
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
1. DEFINITIONS
"Contribution" means:
a) in the case of the initial Contributor, the initial code and documentation distributed under this Agreement, and
b) in the case of each subsequent Contributor:
i) changes to the Program, and
ii) additions to the Program;
where such changes and/or additions to the Program originate from and are distributed by that particular Contributor. A Contribution 'originates' from a Contributor if it was added to the Program by such Contributor itself or anyone acting on such Contributor's behalf. Contributions do not include additions to the Program which: (i) are separate modules of software distributed in conjunction with the Program under their own license agreement, and (ii) are not derivative works of the Program.
"Contributor" means any person or entity that distributes the Program.
"Licensed Patents" mean patent claims licensable by a Contributor which are necessarily infringed by the use or sale of its Contribution alone or when combined with the Program.
"Program" means the Contributions distributed in accordance with this Agreement.
"Recipient" means anyone who receives the Program under this Agreement, including all Contributors.
2. GRANT OF RIGHTS
a) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, distribute and sublicense the Contribution of such Contributor, if any, and such derivative works, in source code and object code form.
b) Subject to the terms of this Agreement, each Contributor hereby grants Recipient a non-exclusive, worldwide, royalty-free patent license under Licensed Patents to make, use, sell, offer to sell, import and otherwise transfer the Contribution of such Contributor, if any, in source code and object code form. This patent license shall apply to the combination of the Contribution and the Program if, at the time the Contribution is added by the Contributor, such addition of the Contribution causes such combination to be covered by the Licensed Patents. The patent license shall not apply to any other combinations which include the Contribution. No hardware per se is licensed hereunder.
c) Recipient understands that although each Contributor grants the licenses to its Contributions set forth herein, no assurances are provided by any Contributor that the Program does not infringe the patent or other intellectual property rights of any other entity. Each Contributor disclaims any liability to Recipient for claims brought by any other entity based on infringement of intellectual property rights or otherwise. As a condition to exercising the rights and licenses granted hereunder, each Recipient hereby assumes sole responsibility to secure any other intellectual property rights needed, if any. For example, if a third party patent license is required to allow Recipient to distribute the Program, it is Recipient's responsibility to acquire that license before distributing the Program.
d) Each Contributor represents that to its knowledge it has sufficient copyright rights in its Contribution, if any, to grant the copyright license set forth in this Agreement.
3. REQUIREMENTS
A Contributor may choose to distribute the Program in object code form under its own license agreement, provided that:
a) it complies with the terms and conditions of this Agreement; and
b) its license agreement:
i) effectively disclaims on behalf of all Contributors all warranties and conditions, express and implied, including warranties or conditions of title and non-infringement, and implied warranties or conditions of merchantability and fitness for a particular purpose;
ii) effectively excludes on behalf of all Contributors all liability for damages, including direct, indirect, special, incidental and consequential damages, such as lost profits;
iii) states that any provisions which differ from this Agreement are offered by that Contributor alone and not by any other party; and
iv) states that source code for the Program is available from such Contributor, and informs licensees how to obtain it in a reasonable manner on or through a medium customarily used for software exchange.
When the Program is made available in source code form:
a) it must be made available under this Agreement; and
b) a copy of this Agreement must be included with each copy of the Program.
Contributors may not remove or alter any copyright notices contained within the Program.
Each Contributor must identify itself as the originator of its Contribution, if any, in a manner that reasonably allows subsequent Recipients to identify the originator of the Contribution.
4. COMMERCIAL DISTRIBUTION
Commercial distributors of software may accept certain responsibilities with respect to end users, business partners and the like. While this license is intended to facilitate the commercial use of the Program, the Contributor who includes the Program in a commercial product offering should do so in a manner which does not create potential liability for other Contributors. Therefore, if a Contributor includes the Program in a commercial product offering, such Contributor ("Commercial Contributor") hereby agrees to defend and indemnify every other Contributor ("Indemnified Contributor") against any losses, damages and costs (collectively "Losses") arising from claims, lawsuits and other legal actions brought by a third party against the Indemnified Contributor to the extent caused by the acts or omissions of such Commercial Contributor in connection with its distribution of the Program in a commercial product offering. The obligations in this section do not apply to any claims or Losses relating to any actual or alleged intellectual property infringement. In order to qualify, an Indemnified Contributor must: a) promptly notify the Commercial Contributor in writing of such claim, and b) allow the Commercial Contributor to control, and cooperate with the Commercial Contributor in, the defense and any related settlement negotiations. The Indemnified Contributor may participate in any such claim at its own expense.
For example, a Contributor might include the Program in a commercial product offering, Product X. That Contributor is then a Commercial Contributor. If that Commercial Contributor then makes performance claims, or offers warranties related to Product X, those performance claims and warranties are such Commercial Contributor's responsibility alone. Under this section, the Commercial Contributor would have to defend claims against the other Contributors related to those performance claims and warranties, and if a court requires any other Contributor to pay any damages as a result, the Commercial Contributor must pay those damages.
5. NO WARRANTY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each Recipient is solely responsible for determining the appropriateness of using and distributing the Program and assumes all risks associated with its exercise of rights under this Agreement , including but not limited to the risks and costs of program errors, compliance with applicable laws, damage to or loss of data, programs or equipment, and unavailability or interruption of operations.
6. DISCLAIMER OF LIABILITY
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
7. GENERAL
If any provision of this Agreement is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this Agreement, and without further action by the parties hereto, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable.
If Recipient institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Program itself (excluding combinations of the Program with other software or hardware) infringes such Recipient's patent(s), then such Recipient's rights granted under Section 2(b) shall terminate as of the date such litigation is filed.
All Recipient's rights under this Agreement shall terminate if it fails to comply with any of the material terms or conditions of this Agreement and does not cure such failure in a reasonable period of time after becoming aware of such noncompliance. If all Recipient's rights under this Agreement terminate, Recipient agrees to cease use and distribution of the Program as soon as reasonably practicable. However, Recipient's obligations under this Agreement and any licenses granted by Recipient relating to the Program shall continue and survive.
Everyone is permitted to copy and distribute copies of this Agreement, but in order to avoid inconsistency the Agreement is copyrighted and may only be modified in the following manner. The Agreement Steward reserves the right to publish new versions (including revisions) of this Agreement from time to time. No one other than the Agreement Steward has the right to modify this Agreement. The Eclipse Foundation is the initial Agreement Steward. The Eclipse Foundation may assign the responsibility to serve as the Agreement Steward to a suitable separate entity. Each new version of the Agreement will be given a distinguishing version number. The Program (including Contributions) may always be distributed subject to the version of the Agreement under which it was received. In addition, after a new version of the Agreement is published, Contributor may elect to distribute the Program (including its Contributions) under the new version. Except as expressly stated in Sections 2(a) and 2(b) above, Recipient receives no rights or licenses to the intellectual property of any Contributor under this Agreement, whether expressly, by implication, estoppel or otherwise. All rights in the Program not expressly granted under this Agreement are reserved.
This Agreement is governed by the laws of the State of New York and the intellectual property laws of the United States of America. No party to this Agreement will bring a legal action under this Agreement more than one year after the cause of action arose. Each party waives its rights to a jury trial in any resulting litigation. |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server; the logs show the following:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explanation on Stack Overflow][1]. It shows that JDK 9 introduced a breaking change here.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
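As a further illustration (again a hypothetical sketch of mine, not project code), the incompatibility can also be anticipated up front by parsing `java.specification.version`, so a component can decide at startup whether the old `URLClassLoader` assumption can hold at all:

```java
// Hypothetical guard: detect whether the running JDK still makes the
// application class loader a URLClassLoader (true only through JDK 8).
public class JdkVersionGuard {
    public static void main(String[] args) {
        String spec = System.getProperty("java.specification.version");
        // JDK 8 and earlier report "1.x"; JDK 9+ report "9", "11", "17", ...
        int major = spec.startsWith("1.")
                ? Integer.parseInt(spec.substring(2))
                : Integer.parseInt(spec);
        boolean castSafe = major <= 8;
        System.out.println("JDK major version: " + major);
        System.out.println("URLClassLoader cast safe: " + castSafe);
    }
}
```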
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | dolphinscheduler-dist/release-docs/licenses/LICENSE-jsp-api-2.1-6.1.14.txt | Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server, and the logs show the following:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explanation on Stack Overflow][1]. It shows that JDK 9 introduced a breaking change here: the application class loader no longer extends `URLClassLoader`.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
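To make the incompatibility concrete, here is a minimal, hypothetical sketch of the JDK 8-only pattern that triggers this exception, next to a version-safe alternative (`ClasspathCompat` is an illustrative helper, not the actual Jasper or DolphinScheduler code):

```java
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;

// Hypothetical illustration of the breaking change, not real DS code.
public class ClasspathCompat {
    public static void main(String[] args) {
        ClassLoader cl = ClasspathCompat.class.getClassLoader();

        // JDK 8-only pattern (as in old Jasper): throws ClassCastException on JDK 9+,
        // where the app class loader is jdk.internal.loader.ClassLoaders$AppClassLoader
        // and no longer extends URLClassLoader.
        // URL[] urls = ((URLClassLoader) cl).getURLs();

        // Version-safe alternative: guard the cast and fall back to java.class.path.
        if (cl instanceof URLClassLoader) {
            for (URL url : ((URLClassLoader) cl).getURLs()) {
                System.out.println(url);
            }
        } else {
            for (String entry : System.getProperty("java.class.path").split(File.pathSeparator)) {
                System.out.println(entry);
            }
        }
    }
}
```

A JSP implementation that applies this kind of guard (or removing the JSP dependency entirely) should avoid the cast on modern JDKs.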
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.4-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed and easy-to-expand visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing, making the scheduling system out of the box for data
processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.18.RELEASE</spring.version>
<spring.boot.version>2.1.17.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.3.0</quartz.version>
<jackson.version>2.9.10</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.22</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.11</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>20.0</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<presto.jdbc.version>0.238.1</presto.jdbc.version>
<jsp-2.1.version>6.1.14</jsp-2.1.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<apache.rat.version>0.13</apache.rat.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
<rpm-maven-plugion.version>2.2.0</rpm-maven-plugion.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<swagger-models.version>1.5.24</swagger-models.version>
<guava-retry.version>2.0.0</guava-retry.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-plugin-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>${presto.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<!-- for api module -->
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jsp-2.1</artifactId>
<version>${jsp-2.1.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-models</artifactId>
<version>${swagger-models.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<version>${rpm-maven-plugion.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
<includes>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/template/AlertTemplateFactoryTest.java</include>
<include>**/alert/template/impl/DefaultHTMLTemplateTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/ExcelUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/utils/JSONUtilsTest.java</include>
<!--<include>**/alert/utils/MailUtilsTest.java</include>-->
<include>**/alert/plugin/EmailAlertPluginTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/controller/TenantControllerTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
<include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include>
<include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigLDAPTest.java</include>
<include>**/api/security/SecurityConfigPasswordTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<include>**/api/service/DataSourceServiceTest.java</include>
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessDefinitionVersionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/service/WorkFlowLineageServiceTest.java</include>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/controller/TaskInstanceControllerTest.java</include>
<include>**/api/controller/WorkFlowLineageControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/DataxParametersTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/TimePlaceholderUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/VarPoolUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/utils/KerberosHttpClientTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/HadoopUtils.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/common/enums/ExecutionStatusTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/dao/entity/TaskInstanceTest.java</include>
<include>**/dao/entity/UdfFuncTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<include>**/remote/NettyRemotingClientTest.java</include>
<include>**/remote/NettyUtilTest.java</include>
<include>**/remote/ResponseFutureTest.java</include>
<include>**/server/log/LoggerServerTest.java</include>
<include>**/server/entity/SQLTaskExecutionContextTest.java</include>
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<include>**/server/master/register/MasterRegistryTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include>
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<include>**/server/master/ConditionsTaskTest.java</include>
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<include>**/server/master/SubProcessTaskTest.java</include>
<include>**/server/master/processor/TaskAckProcessorTest.java</include>
<include>**/server/master/processor/TaskKillResponseProcessorTest.java</include>
<include>**/server/master/processor/queue/TaskResponseServiceTest.java</include>
<include>**/server/register/ZookeeperNodeManagerTest.java</include>
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<include>**/server/utils/HostTest.java</include>
<include>**/server/utils/FlinkArgsUtilsTest.java</include>
<include>**/server/utils/LogUtilsTest.java</include>
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>
<include>**/server/worker/registry/WorkerRegistryTest.java</include>
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/task/shell/ShellTaskTest.java</include>
<include>**/server/worker/task/TaskManagerTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/server/worker/runner/TaskExecuteThreadTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/process/ProcessServiceTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/zk/CuratorZookeeperClientTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserAlertGroupMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/datasource/HiveDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/model/AlertDataTest.java</include>
<include>**/plugin/model/AlertInfoTest.java</include>
<include>**/plugin/utils/PropertyUtilsTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
<version>${apache.rat.version}</version>
<configuration>
<excludeSubProjects>false</excludeSubProjects>
<addDefaultLicenseMatchers>false</addDefaultLicenseMatchers>
<licenses>
<license implementation="org.apache.rat.analysis.license.SimplePatternBasedLicense">
<licenseFamilyCategory>AL20</licenseFamilyCategory>
<licenseFamilyName>Apache License, 2.0</licenseFamilyName>
<patterns>
<pattern>Licensed to the Apache Software Foundation (ASF)</pattern>
</patterns>
</license>
</licenses>
<licenseFamilies>
<licenseFamily implementation="org.apache.rat.license.SimpleLicenseFamily">
<familyName>Apache License, 2.0</familyName>
</licenseFamily>
</licenseFamilies>
<excludes>
<exclude>**/node_modules/**</exclude>
<exclude>**/node/**</exclude>
<exclude>**/dist/**</exclude>
<exclude>**/licenses/**</exclude>
<exclude>.github/**</exclude>
<exclude>**/sql/soft_version</exclude>
<exclude>**/common/utils/ScriptRunner.java</exclude>
<exclude>**/*.json</exclude>
<!-- document files -->
<exclude>**/*.md</exclude>
<exclude>**/*.MD</exclude>
<exclude>**/*.txt</exclude>
<exclude>**/docs/**</exclude>
<exclude>**/*.babelrc</exclude>
<exclude>**/*.eslint*</exclude>
<exclude>**/.mvn/jvm.config</exclude>
<exclude>**/.mvn/wrapper/**</exclude>
</excludes>
<consoleOutput>true</consoleOutput>
</configuration>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-plugin-api</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,233 | [Bug][api-server] ClassCastException: AppClassLoader cannot be cast to class java.net.URLClassLoader | *For better global communication, please give priority to using English description, thx! *
**Which version of Dolphin Scheduler:**
1.3.1
**Describe the bug**
Just started the api-server, and the logs show the following:
```
[WARN] 2020-07-17 10:17:17.231 org.eclipse.jetty.server.handler.ContextHandler.application:[2355] - unavailable
java.lang.ClassCastException: class jdk.internal.loader.ClassLoaders$AppClassLoader cannot be cast to class java.net.URLClassLoader (jdk.internal.loader.ClassLoaders$AppClassLoader and java.net.URLClassLoader are in module java.base of loader 'bootstrap')
at org.apache.jasper.compiler.JspRuntimeContext.<init>(JspRuntimeContext.java:174)
at org.apache.jasper.servlet.JspServlet.init(JspServlet.java:150)
at org.eclipse.jetty.servlet.ServletHolder.initServlet(ServletHolder.java:672)
at org.eclipse.jetty.servlet.ServletHolder.initialize(ServletHolder.java:429)
at org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:750)
at java.base/java.util.stream.SortedOps$SizedRefSortingSink.end(SortedOps.java:357)
at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
at java.base/java.util.stream.StreamSpliterators$WrappingSpliterator.forEachRemaining(StreamSpliterators.java:312)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:735)
at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
at org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:744)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext$JettyEmbeddedServletHandler.deferredInitialize(JettyEmbeddedWebAppContext.java:46)
at org.springframework.boot.web.embedded.jetty.JettyEmbeddedWebAppContext.deferredInitialize(JettyEmbeddedWebAppContext.java:36)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.handleDeferredInitialize(JettyWebServer.java:221)
at org.springframework.boot.web.embedded.jetty.JettyWebServer.start(JettyWebServer.java:142)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.startWebServer(ServletWebServerApplicationContext.java:311)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.finishRefresh(ServletWebServerApplicationContext.java:164)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:552)
at org.springframework.boot.web.servlet.context.ServletWebServerApplicationContext.refresh(ServletWebServerApplicationContext.java:142)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:775)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:397)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:316)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248)
at org.apache.dolphinscheduler.api.ApiApplicationServer.main(ApiApplicationServer.java:36)
```
I found [an explanation on Stack Overflow][1]. It shows that JDK 9 introduced a breaking change here: the application class loader no longer extends `URLClassLoader`.
[1]: https://stackoverflow.com/questions/39739075/hive-shell-not-loading/41637409#41637409
**Requirement or improvement**
Please fix this to make DS compatible with modern JDK. Thanks! | https://github.com/apache/dolphinscheduler/issues/3233 | https://github.com/apache/dolphinscheduler/pull/4312 | 9c226435e377e055c64c4c5586a2fcd426ca61af | b8dc4357270ef4610237e2bb498fe604153c2a0f | "2020-07-17T17:30:23Z" | java | "2021-01-05T02:02:16Z" | tools/dependencies/known-dependencies.txt | HikariCP-3.2.0.jar
activation-1.1.jar
ant-1.6.5.jar
aopalliance-1.0.jar
apache-el-8.5.54.jar
apacheds-i18n-2.0.0-M15.jar
apacheds-kerberos-codec-2.0.0-M15.jar
api-asn1-api-1.0.0-M20.jar
api-util-1.0.0-M20.jar
asm-3.1.jar
aspectjweaver-1.9.6.jar
audience-annotations-0.5.0.jar
avro-1.7.4.jar
aws-java-sdk-1.7.4.jar
bonecp-0.8.0.RELEASE.jar
byte-buddy-1.9.16.jar
classmate-1.4.0.jar
clickhouse-jdbc-0.1.52.jar
commons-cli-1.2.jar
commons-codec-1.11.jar
commons-collections-3.2.2.jar
commons-collections4-4.1.jar
commons-compress-1.4.1.jar
commons-compiler-3.0.16.jar
commons-configuration-1.10.jar
commons-daemon-1.0.13.jar
commons-beanutils-1.7.0.jar
commons-dbcp-1.4.jar
commons-email-1.5.jar
commons-httpclient-3.0.1.jar
commons-io-2.4.jar
commons-lang-2.6.jar
commons-logging-1.1.1.jar
commons-math3-3.1.1.jar
commons-net-3.1.jar
commons-pool-1.6.jar
core-3.1.1.jar
cron-utils-5.0.5.jar
curator-client-4.3.0.jar
curator-framework-4.3.0.jar
curator-recipes-4.3.0.jar
datanucleus-api-jdo-4.2.1.jar
datanucleus-core-4.1.6.jar
datanucleus-rdbms-4.1.7.jar
derby-10.14.2.0.jar
druid-1.1.22.jar
gson-2.8.6.jar
guava-20.0.jar
guice-3.0.jar
guice-servlet-3.0.jar
h2-1.4.200.jar
hadoop-annotations-2.7.3.jar
hadoop-auth-2.7.3.jar
hadoop-aws-2.7.3.jar
hadoop-client-2.7.3.jar
hadoop-common-2.7.3.jar
hadoop-hdfs-2.7.3.jar
hadoop-mapreduce-client-app-2.7.3.jar
hadoop-mapreduce-client-common-2.7.3.jar
hadoop-mapreduce-client-core-2.7.3.jar
hadoop-mapreduce-client-jobclient-2.7.3.jar
hadoop-mapreduce-client-shuffle-2.7.3.jar
hadoop-yarn-api-2.7.3.jar
hadoop-yarn-client-2.7.3.jar
hadoop-yarn-common-2.7.3.jar
hadoop-yarn-server-common-2.7.3.jar
hamcrest-core-1.3.jar
hibernate-validator-6.0.20.Final.jar
hive-common-2.1.0.jar
hive-jdbc-2.1.0.jar
hive-metastore-2.1.0.jar
hive-orc-2.1.0.jar
hive-serde-2.1.0.jar
hive-service-2.1.0.jar
hive-service-rpc-2.1.0.jar
hive-storage-api-2.1.0.jar
htrace-core-3.1.0-incubating.jar
httpclient-4.4.1.jar
httpcore-4.4.1.jar
httpmime-4.5.12.jar
jackson-annotations-2.9.10.jar
jackson-core-2.9.10.jar
jackson-core-asl-1.9.13.jar
jackson-databind-2.9.10.jar
jackson-datatype-jdk8-2.9.10.jar
jackson-datatype-jsr310-2.9.10.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
jackson-module-parameter-names-2.9.10.jar
jackson-xc-1.9.13.jar
jamon-runtime-2.3.1.jar
janino-3.0.16.jar
java-xmlbuilder-0.4.jar
javax.activation-api-1.2.0.jar
javax.annotation-api-1.3.2.jar
javax.inject-1.jar
javax.jdo-3.2.0-m3.jar
javax.mail-1.6.2.jar
javax.servlet-api-3.1.0.jar
javolution-5.5.1.jar
jaxb-api-2.3.1.jar
jaxb-impl-2.2.3-1.jar
jboss-logging-3.3.3.Final.jar
jdo-api-3.0.1.jar
jersey-client-1.9.jar
jersey-core-1.9.jar
jersey-guice-1.9.jar
jersey-json-1.9.jar
jersey-server-1.9.jar
jets3t-0.9.0.jar
jettison-1.1.jar
jetty-6.1.26.jar
jetty-continuation-9.4.31.v20200723.jar
jetty-http-9.4.31.v20200723.jar
jetty-io-9.4.31.v20200723.jar
jetty-security-9.4.31.v20200723.jar
jetty-server-9.4.31.v20200723.jar
jetty-servlet-9.4.31.v20200723.jar
jetty-servlets-9.4.31.v20200723.jar
jetty-util-6.1.26.jar
jetty-util-9.4.31.v20200723.jar
jetty-webapp-9.4.31.v20200723.jar
jetty-xml-9.4.31.v20200723.jar
jline-0.9.94.jar
jna-4.5.2.jar
jna-platform-4.5.2.jar
joda-time-2.10.6.jar
jpam-1.1.jar
jsch-0.1.42.jar
jsp-2.1-6.1.14.jar
jsp-api-2.1-6.1.14.jar
jsp-api-2.1.jar
jsqlparser-2.1.jar
jsr305-3.0.0.jar
jta-1.1.jar
jul-to-slf4j-1.7.30.jar
junit-4.12.jar
leveldbjni-all-1.8.jar
libfb303-0.9.3.jar
libthrift-0.9.3.jar
log4j-1.2-api-2.11.2.jar
log4j-1.2.17.jar
log4j-api-2.11.2.jar
log4j-core-2.11.2.jar
logback-classic-1.2.3.jar
logback-core-1.2.3.jar
lz4-1.3.0.jar
mapstruct-1.2.0.Final.jar
mssql-jdbc-6.1.0.jre8.jar
mybatis-3.5.2.jar
mybatis-plus-3.2.0.jar
mybatis-plus-annotation-3.2.0.jar
mybatis-plus-boot-starter-3.2.0.jar
mybatis-plus-core-3.2.0.jar
mybatis-plus-extension-3.2.0.jar
mybatis-spring-2.0.2.jar
netty-3.6.2.Final.jar
netty-all-4.1.52.Final.jar
opencsv-2.3.jar
oshi-core-3.5.0.jar
paranamer-2.3.jar
parquet-hadoop-bundle-1.8.1.jar
poi-3.17.jar
postgresql-42.1.4.jar
protobuf-java-2.5.0.jar
quartz-2.3.0.jar
quartz-jobs-2.3.0.jar
slf4j-api-1.7.5.jar
snakeyaml-1.23.jar
snappy-0.2.jar
snappy-java-1.0.4.1.jar
spring-aop-5.1.18.RELEASE.jar
spring-beans-5.1.18.RELEASE.jar
spring-boot-2.1.17.RELEASE.jar
spring-boot-autoconfigure-2.1.17.RELEASE.jar
spring-boot-starter-2.1.17.RELEASE.jar
spring-boot-starter-aop-2.1.17.RELEASE.jar
spring-boot-starter-jdbc-2.1.17.RELEASE.jar
spring-boot-starter-jetty-2.1.17.RELEASE.jar
spring-boot-starter-json-2.1.17.RELEASE.jar
spring-boot-starter-logging-2.1.17.RELEASE.jar
spring-boot-starter-web-2.1.17.RELEASE.jar
spring-context-5.1.18.RELEASE.jar
spring-core-5.1.18.RELEASE.jar
spring-expression-5.1.18.RELEASE.jar
spring-jcl-5.1.18.RELEASE.jar
spring-jdbc-5.1.18.RELEASE.jar
spring-plugin-core-1.2.0.RELEASE.jar
spring-plugin-metadata-1.2.0.RELEASE.jar
spring-tx-5.1.18.RELEASE.jar
spring-web-5.1.18.RELEASE.jar
spring-webmvc-5.1.18.RELEASE.jar
springfox-core-2.9.2.jar
springfox-schema-2.9.2.jar
springfox-spi-2.9.2.jar
springfox-spring-web-2.9.2.jar
springfox-swagger-common-2.9.2.jar
springfox-swagger-ui-2.9.2.jar
springfox-swagger2-2.9.2.jar
swagger-annotations-1.5.20.jar
swagger-bootstrap-ui-1.9.3.jar
swagger-models-1.5.24.jar
tephra-api-0.6.0.jar
threetenbp-1.3.6.jar
transaction-api-1.1.jar
validation-api-2.0.1.Final.jar
xercesImpl-2.9.1.jar
xml-apis-1.4.01.jar
xmlenc-0.52.jar
xz-1.0.jar
zookeeper-3.4.14.jar
guava-retrying-2.0.0.jar
presto-jdbc-0.238.1.jar
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,391 | [Improvement][Readme] please remove the invalid content | **Describe the question**
Please remove the following content from README.md and README_zh_CN.md; these two options are no longer valid:
![image](https://user-images.githubusercontent.com/15833811/103738751-83367000-502f-11eb-83f0-11b3989884bf.png)
**Which version of DolphinScheduler:**
- [dev]
| https://github.com/apache/dolphinscheduler/issues/4391 | https://github.com/apache/dolphinscheduler/pull/4374 | c67083f028820b035ebbd789fc29efa26dd3152a | cf41fb77742af589a06acf3ed27550f85db6128d | "2021-01-06T06:59:42Z" | java | "2021-01-07T14:41:06Z" | README.md | Dolphin Scheduler Official Website
[dolphinscheduler.apache.org](https://dolphinscheduler.apache.org)
============
[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
[![Total Lines](https://tokei.rs/b1/github/apache/Incubator-DolphinScheduler?category=lines)](https://github.com/apache/Incubator-DolphinScheduler)
[![codecov](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev/graph/badge.svg)](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=apache-dolphinscheduler&metric=alert_status)](https://sonarcloud.io/dashboard?id=apache-dolphinscheduler)
> Dolphin Scheduler for Big Data
[![Stargazers over time](https://starchart.cc/apache/incubator-dolphinscheduler.svg)](https://starchart.cc/apache/incubator-dolphinscheduler)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
### Design features:
A distributed and easy-to-extend visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing and making the scheduling system work `out of the box` for data processing.
Its main objectives are as follows:
- Associate tasks according to their dependencies in a DAG graph, and visualize the running state of each task in real time.
- Support for many task types: Shell, MR, Spark, SQL (mysql, postgresql, hive, sparksql), Python, Sub_Process, Procedure, etc.
- Support process scheduling, dependency scheduling, manual scheduling, manual pause/stop/recovery, failed retry/alarm, recovery from specified nodes, killing tasks, etc.
- Support process priority, task priority, task failover, and task timeout alarm/failure
- Support process global parameters and node custom parameter settings
- Support online upload/download and management of resource files, as well as online file creation and editing
- Support task log online viewing and scrolling, online download log, etc.
- Implement cluster HA, decentralize Master cluster and Worker cluster through Zookeeper
- Support online viewing of `Master/Worker` CPU load and memory usage
- Support process running history tree/gantt chart display, support task status statistics, process status statistics
- Support backfilling data
- Support multi-tenant
- Support internationalization
- More features are waiting for partners to explore
### What's in Dolphin Scheduler
Stability | Easy to use | Features | Scalability |
-- | -- | -- | --
Decentralized multi-master and multi-worker | Visualized process definitions show key information such as task status, task type, retry times, task running machine and visual variables at a glance. | Support pause and recover operations | Support custom task types
HA is supported by itself | All process definition operations are visualized: drag tasks to draw DAGs, and configure data sources and resources. For third-party systems, API operations are also provided. | Users on DolphinScheduler can achieve many-to-one or one-to-one mapping relationships through tenants and Hadoop users, which is very important for scheduling big data jobs. | The scheduler uses distributed scheduling, and the overall scheduling capability increases linearly with the scale of the cluster. Master and Worker support dynamic online and offline.
Overload processing: with the task queue mechanism, the number of schedulable tasks on a single machine can be flexibly configured; excess tasks are cached in the task queue, so they will not jam the machine. | One-click deployment | Supports traditional shell tasks, and also supports big data platform task scheduling: MR, Spark, SQL (mysql, postgresql, hive, sparksql), Python, Procedure, Sub_Process | |
### System partial screenshot
![home page](https://user-images.githubusercontent.com/15833811/75218288-bf286400-57d4-11ea-8263-d639c6511d5f.jpg)
![dag](https://user-images.githubusercontent.com/15833811/75236750-3374fe80-57f9-11ea-857d-62a66a5a559d.png)
![process definition list page](https://user-images.githubusercontent.com/15833811/75216886-6f479e00-57d0-11ea-92dd-66e7640a186f.png)
![view task log online](https://user-images.githubusercontent.com/15833811/75216924-9900c500-57d0-11ea-91dc-3522a76bdbbe.png)
![resource management](https://user-images.githubusercontent.com/15833811/75216984-be8dce80-57d0-11ea-840d-58546edc8788.png)
![monitor](https://user-images.githubusercontent.com/59273635/75625839-c698a480-5bfc-11ea-8bbe-895b561b337f.png)
![security](https://user-images.githubusercontent.com/15833811/75236441-bfd2f180-57f8-11ea-88bd-f24311e01b7e.png)
![treeview](https://user-images.githubusercontent.com/15833811/75217191-3fe56100-57d1-11ea-8856-f19180d9a879.png)
### Online Demo
- <a href="http://106.75.43.194:8888" target="_blank">Online Demo</a>
### Recent R&D plan
Work plan of Dolphin Scheduler: [R&D plan](https://github.com/apache/incubator-dolphinscheduler/projects/1). The `In Develop` card lists what is currently being developed, and the TODO card lists what is still to be done (including feature ideas).
### How to contribute
Welcome to participate in contributing, please refer to the process of submitting the code:
[[How to contribute](https://dolphinscheduler.apache.org/en-us/docs/development/contribute.html)]
### How to Build
```bash
./mvnw clean install -Prelease
```
Artifact:
```
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-dolphinscheduler-bin.tar.gz: Binary package of DolphinScheduler
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-src.zip: Source code package of DolphinScheduler
```
### Thanks
Dolphin Scheduler uses a lot of excellent open source projects, such as google guava, guice, grpc, netty, ali bonecp, quartz, and many Apache open source projects.
It is only by standing on the shoulders of these open source projects that the birth of Dolphin Scheduler was possible. We are very grateful for all the open source software we use! We hope not only to be beneficiaries of open source, but also to be open source contributors, and we hope that partners who share the same passion and conviction for open source will join in and contribute to open source!
### Get Help
1. Submit an issue
1. Subscribe to the mail list: https://dolphinscheduler.apache.org/en-us/docs/development/subscribe.html, then send mail to dev@dolphinscheduler.apache.org
1. Slack channel: [![Slack Status](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://join.slack.com/share/zt-do3gvfhj-UUhrAX2GxkVX_~JJt1jpKA)
1. Contact WeChat(dailidong66). This is just for Mandarin(CN) discussion.
### License
Please refer to [LICENSE](https://github.com/apache/incubator-dolphinscheduler/blob/dev/LICENSE) file.
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,391 | [Improvement][Readme] please remove the invalid content | **Describe the question**
Please delete the following content in README.md and README_zh_CN.md; these two options are no longer valid.
![image](https://user-images.githubusercontent.com/15833811/103738751-83367000-502f-11eb-83f0-11b3989884bf.png)
**Which version of DolphinScheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4391 | https://github.com/apache/dolphinscheduler/pull/4374 | c67083f028820b035ebbd789fc29efa26dd3152a | cf41fb77742af589a06acf3ed27550f85db6128d | "2021-01-06T06:59:42Z" | java | "2021-01-07T14:41:06Z" | README_zh_CN.md | Dolphin Scheduler Official Website
[dolphinscheduler.apache.org](https://dolphinscheduler.apache.org)
============
[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
[![Total Lines](https://tokei.rs/b1/github/apache/Incubator-DolphinScheduler?category=lines)](https://github.com/apache/Incubator-DolphinScheduler)
[![codecov](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev/graph/badge.svg)](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=apache-dolphinscheduler&metric=alert_status)](https://sonarcloud.io/dashboard?id=apache-dolphinscheduler)
> Dolphin Scheduler for Big Data
[![Stargazers over time](https://starchart.cc/apache/incubator-dolphinscheduler.svg)](https://starchart.cc/apache/incubator-dolphinscheduler)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
**设计特点:** 一个分布式易扩展的可视化DAG工作流任务调度系统。致力于解决数据处理流程中错综复杂的依赖关系,使调度系统在数据处理流程中`开箱即用`。
其主要目标如下:
- 以DAG图的方式将Task按照任务的依赖关系关联起来,可实时可视化监控任务的运行状态
- 支持丰富的任务类型:Shell、MR、Spark、SQL(mysql、postgresql、hive、sparksql),Python,Sub_Process、Procedure等
- 支持工作流定时调度、依赖调度、手动调度、手动暂停/停止/恢复,同时支持失败重试/告警、从指定节点恢复失败、Kill任务等操作
- 支持工作流优先级、任务优先级及任务的故障转移及任务超时告警/失败
- 支持工作流全局参数及节点自定义参数设置
- 支持资源文件的在线上传/下载,管理等,支持在线文件创建、编辑
- 支持任务日志在线查看及滚动、在线下载日志等
- 实现集群HA,通过Zookeeper实现Master集群和Worker集群去中心化
- 支持对`Master/Worker` cpu load,memory,cpu在线查看
- 支持工作流运行历史树形/甘特图展示、支持任务状态统计、流程状态统计
- 支持补数
- 支持多租户
- 支持国际化
- 还有更多等待伙伴们探索
### 系统部分截图
![home page](https://user-images.githubusercontent.com/15833811/75208819-abbad000-57b7-11ea-8d3c-67e7c270671f.jpg)
![dag](https://user-images.githubusercontent.com/15833811/75209584-93e44b80-57b9-11ea-952e-537fb24ec72d.jpg)
![log](https://user-images.githubusercontent.com/15833811/75209645-c55d1700-57b9-11ea-94d4-e3fa91ab5218.jpg)
![gantt](https://user-images.githubusercontent.com/15833811/75209640-c0986300-57b9-11ea-878e-a2098533ad44.jpg)
![resources](https://user-images.githubusercontent.com/15833811/75209403-11f42280-57b9-11ea-9b59-d4be77063553.jpg)
![monitor](https://user-images.githubusercontent.com/15833811/75209631-b5ddce00-57b9-11ea-8d22-cdf15cf0ee25.jpg)
![security](https://user-images.githubusercontent.com/15833811/75209633-baa28200-57b9-11ea-9def-94bef2e212a7.jpg)
### 我要体验
- <a href="http://106.75.43.194:8888" target="_blank">我要体验</a>
### 近期研发计划
DolphinScheduler的工作计划:<a href="https://github.com/apache/incubator-dolphinscheduler/projects/1" target="_blank">研发计划</a> ,其中 In Develop卡片下是正在研发的功能,TODO卡片是待做事项(包括 feature ideas)
### 参与贡献
非常欢迎大家来参与贡献,贡献流程请参考:
[[参与贡献](https://dolphinscheduler.apache.org/zh-cn/docs/development/contribute.html)]
### How to Build
```bash
./mvnw clean install -Prelease
```
Artifact:
```
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-dolphinscheduler-bin.tar.gz: Binary package of DolphinScheduler
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-src.zip: Source code package of DolphinScheduler
```
### 感谢
Dolphin Scheduler使用了很多优秀的开源项目,比如google的guava、guice、grpc,netty,ali的bonecp,quartz,以及apache的众多开源项目等等,
正是由于站在这些开源项目的肩膀上,才有Dolphin Scheduler的诞生的可能。对此我们对使用的所有开源软件表示非常的感谢!我们也希望自己不仅是开源的受益者,也能成为开源的贡献者,也希望对开源有同样热情和信念的伙伴加入进来,一起为开源献出一份力!
### 获得帮助
1. 提交issue
1. 先订阅邮件开发列表:[订阅邮件列表](https://dolphinscheduler.apache.org/zh-cn/docs/development/subscribe.html), 订阅成功后发送邮件到dev@dolphinscheduler.apache.org.
1. 联系微信群助手(ID:dailidong66). 微信仅用于中国用户讨论.
### 版权
请参考 [LICENSE](https://github.com/apache/incubator-dolphinscheduler/blob/dev/LICENSE) 文件.
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,428 | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage
![image](https://user-images.githubusercontent.com/42087586/104288645-0ace2400-54f3-11eb-9466-04cb435d5fd0.png)
![image](https://user-images.githubusercontent.com/42087586/104288766-3224f100-54f3-11eb-9a9f-82d082a45670.png)
| https://github.com/apache/dolphinscheduler/issues/4428 | https://github.com/apache/dolphinscheduler/pull/4442 | 2a98aee1bd36039be1fd0ef03488432c70b6c63a | d0ac87a6389c2ff8766f477acaf3e236e11de7b1 | "2021-01-12T08:28:56Z" | java | "2021-01-14T09:10:14Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/DataSourceMapper.xml | <?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.DataSourceMapper">
<sql id="baseSql">
id, name, note, type, user_id, connection_params, create_time, update_time
</sql>
<select id="queryDataSourceByType" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select
<include refid="baseSql"/>
from t_ds_datasource
where type=#{type}
<if test="userId != 0">
and id in
(select datasource_id
from t_ds_relation_datasource_user
where user_id=#{userId}
union select id as datasource_id
from t_ds_datasource
where user_id=#{userId}
)
</if>
</select>
<select id="selectPaging" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select
<include refid="baseSql"/>
from t_ds_datasource
where 1 =1
<if test="userId != 0">
and id in
(select datasource_id
from t_ds_relation_datasource_user
where user_id=#{userId}
union select id as datasource_id
from t_ds_datasource
where user_id=#{userId}
)
</if>
<if test="name != null and name != ''">
and name like concat ('%', #{name}, '%')
</if>
order by update_time desc
</select>
<select id="queryDataSourceByName" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select
<include refid="baseSql"/>
from t_ds_datasource
where name=#{name}
</select>
<select id="queryAuthedDatasource" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select ds.id, ds.name, ds.note, ds.type, ds.user_id, ds.connection_params, ds.create_time, ds.update_time
from t_ds_datasource ds, t_ds_relation_datasource_user rel
where ds.id = rel.datasource_id AND rel.user_id = #{userId}
</select>
<select id="queryDatasourceExceptUserId" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select
<include refid="baseSql"/>
from t_ds_datasource
where user_id <![CDATA[ <> ]]> #{userId}
</select>
<select id="listAllDataSourceByType" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select
<include refid="baseSql"/>
from t_ds_datasource
where type = #{type}
</select>
<select id="listAuthorizedDataSource" resultType="org.apache.dolphinscheduler.dao.entity.DataSource">
select
<include refid="baseSql"/>
from t_ds_datasource
where
id in (select datasource_id from t_ds_relation_datasource_user where user_id=#{userId}
union select id as datasource_id from t_ds_datasource where user_id=#{userId})
<if test="dataSourceIds != null and dataSourceIds != ''">
and id in
<foreach collection="dataSourceIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
</mapper>
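The query methods above expose only user_id, which is why the datasource list in the UI cannot show an owner today. As a rough illustration of the direction this issue asks for (not the actual diff from pull/4442), the paging query could join t_ds_datasource against t_ds_user to carry the owner's name along. A minimal Java sketch using MyBatis annotations; the interface name, method name, and the transient userName property assumed on the DataSource entity are all hypothetical:

```java
import java.util.List;

import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.annotations.Select;

// Hypothetical sketch: surface the owner's name alongside each datasource row.
// Assumes a transient userName property would be added to the DataSource entity.
public interface DataSourceOwnerMapper {

    @Select("SELECT d.id, d.name, d.note, d.type, d.user_id, "
            + "d.connection_params, d.create_time, d.update_time, "
            + "u.user_name AS userName "
            + "FROM t_ds_datasource d "
            + "LEFT JOIN t_ds_user u ON d.user_id = u.id "
            + "WHERE d.name LIKE CONCAT('%', #{name}, '%')")
    List<DataSource> selectWithOwner(@Param("name") String name);
}
```

The same LEFT JOIN could equally be written into the `selectPaging` block of the XML mapper above; the annotation form is shown here only to keep the sketch self-contained.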
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,428 | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage
![image](https://user-images.githubusercontent.com/42087586/104288645-0ace2400-54f3-11eb-9466-04cb435d5fd0.png)
![image](https://user-images.githubusercontent.com/42087586/104288766-3224f100-54f3-11eb-9a9f-82d082a45670.png)
| https://github.com/apache/dolphinscheduler/issues/4428 | https://github.com/apache/dolphinscheduler/pull/4442 | 2a98aee1bd36039be1fd0ef03488432c70b6c63a | d0ac87a6389c2ff8766f477acaf3e236e11de7b1 | "2021-01-12T08:28:56Z" | java | "2021-01-14T09:10:14Z" | dolphinscheduler-dao/src/main/resources/org/apache/dolphinscheduler/dao/mapper/ResourceMapper.xml | <?xml version="1.0" encoding="UTF-8" ?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="org.apache.dolphinscheduler.dao.mapper.ResourceMapper">
<sql id="baseSql">
id, alias, file_name, description, user_id, type, size, create_time, update_time,
pid, full_name, is_directory
</sql>
<sql id="baseSqlV2">
${alias}.id, ${alias}.alias, ${alias}.file_name, ${alias}.description, ${alias}.user_id, ${alias}.type, ${alias}.size, ${alias}.create_time, ${alias}.update_time,
${alias}.pid, ${alias}.full_name, ${alias}.is_directory
</sql>
<select id="queryResourceList" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where 1= 1
<if test="fullName != null and fullName != ''">
and full_name = #{fullName}
</if>
<if test="type != -1">
and type = #{type}
</if>
<if test="userId != 0">
and user_id = #{userId}
</if>
</select>
<select id="queryResourceListAuthored" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where 1 = 1
<if test="type != -1">
and type=#{type}
</if>
<if test="userId != 0 and perm != 0">
and id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId} and perm=#{perm}
union select id as resources_id from t_ds_resources where user_id=#{userId})
</if>
<if test="userId != 0 and perm == 0">
and id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId}
union select id as resources_id from t_ds_resources where user_id=#{userId})
</if>
</select>
<select id="queryResourcePaging" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where type=#{type} and pid=#{id}
<if test="userId != 0">
and id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId}
union select id as resources_id from t_ds_resources where user_id=#{userId})
</if>
<if test="searchVal != null and searchVal != ''">
and alias like concat('%', #{searchVal}, '%')
</if>
order by update_time desc
</select>
<select id="queryAuthorizedResourceList" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSqlV2">
<property name="alias" value="r"/>
</include>
from t_ds_resources r,t_ds_relation_resources_user rel
where r.id = rel.resources_id AND rel.user_id = #{userId} and perm=7
</select>
<select id="queryResourceExceptUserId" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where user_id <![CDATA[ <> ]]> #{userId}
</select>
<select id="queryTenantCodeByResourceName" resultType="java.lang.String">
select tenant_code
from t_ds_tenant t, t_ds_user u, t_ds_resources res
where t.id = u.tenant_id and u.id = res.user_id and res.type=#{resType}
and res.full_name= #{resName}
</select>
<select id="listAuthorizedResource" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where type=0
and id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId} and perm=7
union select id as resources_id from t_ds_resources where user_id=#{userId})
<if test="resNames != null and resNames != ''">
and full_name in
<foreach collection="resNames" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<select id="listAuthorizedResourceById" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where id in (select resources_id from t_ds_relation_resources_user where user_id=#{userId} and perm=7
union select id as resources_id from t_ds_resources where user_id=#{userId})
<if test="resIds != null and resIds != ''">
and id in
<foreach collection="resIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</if>
</select>
<delete id="deleteIds" parameterType="java.lang.Integer">
delete from t_ds_resources where id in
<foreach collection="resIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</delete>
<select id="listChildren" resultType="java.lang.Integer">
select id
from t_ds_resources
        <!-- the parameter is spelled "direcotyId" (sic) to match the name declared by the calling Java mapper interface -->
        where pid = #{direcotyId}
</select>
<select id="queryResource" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where type = #{type}
and full_name = #{fullName}
</select>
<update id="batchUpdateResource" parameterType="java.util.List">
<foreach collection="resourceList" item="resource" index="index" open="" close="" separator=";">
update t_ds_resources
<set>
full_name=#{resource.fullName},
update_time=#{resource.updateTime}
</set>
<where>
id=#{resource.id}
</where>
</foreach>
</update>
<select id="listResourceByIds" resultType="org.apache.dolphinscheduler.dao.entity.Resource">
select
<include refid="baseSql"/>
from t_ds_resources
where id in
<foreach collection="resIds" item="i" open="(" close=")" separator=",">
#{i}
</foreach>
</select>
</mapper>
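On the entity side, the resource table itself has no owner-name column, so any joined user_name needs a field that the ORM will not try to persist. A minimal sketch assuming MyBatis-Plus 3.2.0 (the version pinned in the dependency list earlier in this document); the class shown is illustrative, not the actual change from pull/4442:

```java
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;

// Hypothetical sketch: a non-persistent owner field for the resource entity.
@TableName("t_ds_resources")
public class ResourceWithOwner {

    private int id;

    private int userId;

    // Populated by a JOIN against t_ds_user.user_name; exist = false tells
    // MyBatis-Plus that this is not a physical column of t_ds_resources.
    @TableField(exist = false)
    private String userName;

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }
}
```

With such a field in place, the front-end list components in the records that follow (the two list.vue files) would only need one extra table column bound to userName.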
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,428 | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage
![image](https://user-images.githubusercontent.com/42087586/104288645-0ace2400-54f3-11eb-9466-04cb435d5fd0.png)
![image](https://user-images.githubusercontent.com/42087586/104288766-3224f100-54f3-11eb-9a9f-82d082a45670.png)
| https://github.com/apache/dolphinscheduler/issues/4428 | https://github.com/apache/dolphinscheduler/pull/4442 | 2a98aee1bd36039be1fd0ef03488432c70b6c63a | d0ac87a6389c2ff8766f477acaf3e236e11de7b1 | "2021-01-12T08:28:56Z" | java | "2021-01-14T09:10:14Z" | dolphinscheduler-ui/src/js/conf/home/pages/datasource/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column prop="name" :label="$t('Datasource Name')"></el-table-column>
<el-table-column prop="type" :label="$t('Datasource Type')"></el-table-column>
<el-table-column :label="$t('Datasource Parameter')">
<template slot-scope="scope">
<div>
<m-tooltips-JSON :JSON="JSON.parse(scope.row.connectionParams)" :id="scope.row.id">
<span slot="reference">
<el-button size="small" type="text">{{$t('Click to view')}}</el-button>
</span>
</m-tooltips-JSON>
</div>
</template>
</el-table-column>
<el-table-column :label="$t('Description')" min-width="100">
<template slot-scope="scope">
<span>{{scope.row.note | filterNull}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
</div>
</template>
<script>
import { mapActions } from 'vuex'
import { findComponentDownward } from '@/module/util/'
import mTooltipsJSON from '@/module/components/tooltipsJSON/tooltipsJSON'
export default {
name: 'datasource-list',
data () {
return {
// list
list: []
}
},
props: {
// External incoming data
datasourcesList: Array,
// current page number
pageNo: Number,
// Total number of articles
pageSize: Number
},
methods: {
...mapActions('datasource', ['deleteDatasource']),
/**
* Delete current line
*/
_delete (item, i) {
this.deleteDatasource({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* edit
*/
_edit (item) {
findComponentDownward(this.$root, 'datasource-indexP')._create(item)
}
},
watch: {
/**
* Monitor external data changes
*/
datasourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
this.list = this.datasourcesList
},
mounted () {
},
components: { mTooltipsJSON }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,428 | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage
![image](https://user-images.githubusercontent.com/42087586/104288645-0ace2400-54f3-11eb-9466-04cb435d5fd0.png)
![image](https://user-images.githubusercontent.com/42087586/104288766-3224f100-54f3-11eb-9a9f-82d082a45670.png)
| https://github.com/apache/dolphinscheduler/issues/4428 | https://github.com/apache/dolphinscheduler/pull/4442 | 2a98aee1bd36039be1fd0ef03488432c70b6c63a | d0ac87a6389c2ff8766f477acaf3e236e11de7b1 | "2021-01-12T08:28:56Z" | java | "2021-01-14T09:10:14Z" | dolphinscheduler-ui/src/js/conf/home/pages/resource/pages/file/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%">
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('Name')">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.alias }}</p>
<div slot="reference" class="name-wrapper">
<a href="javascript:" class="links" @click="_go(scope.row)">{{ scope.row.alias }}</a>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column :label="$t('Whether directory')" width="100">
<template slot-scope="scope">
{{scope.row.directory? $t('Yes') : $t('No')}}
</template>
</el-table-column>
<el-table-column prop="fileName" :label="$t('File Name')"></el-table-column>
<el-table-column :label="$t('Description')" width="200">
<template slot-scope="scope">
<span>{{scope.row.description | filterNull}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Size')">
<template slot-scope="scope">
{{_rtSize(scope.row.size)}}
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" min-width="120">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="150">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top">
<span><el-button type="primary" size="mini" icon="el-icon-edit-outline" @click="_edit(scope.row)" :disabled="_rtDisb(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Rename')" placement="top">
<span><el-button type="primary" size="mini" icon="el-icon-edit" @click="_rename(scope.row,scope.$index)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Download')" placement="top">
<span><el-button type="primary" size="mini" icon="el-icon-download" @click="_downloadFile(scope.row)" :disabled="scope.row.directory? true: false" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-dialog
:visible.sync="renameDialog"
width="auto">
<m-rename :item="item" @onUpDate="onUpDate" @close="close"></m-rename>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import mRename from './rename'
import { mapActions } from 'vuex'
import { filtTypeArr } from '../../_source/common'
import { bytesToSize } from '@/module/util/util'
import { downloadFile } from '@/module/download'
import localStore from '@/module/util/localStorage'
export default {
name: 'file-manage-list',
data () {
return {
list: [],
renameDialog: false,
item: {},
index: null
}
},
props: {
fileResourcesList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('resource', ['deleteResource']),
_edit (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
this.$router.push({ path: `/resource/file/edit/${item.id}` })
},
_go (item) {
localStore.setItem('file', `${item.alias}|${item.size}`)
if (item.directory) {
localStore.setItem('currentDir', `${item.fullName}`)
this.$router.push({ path: `/resource/file/subdirectory/${item.id}` })
} else {
this.$router.push({ path: `/resource/file/list/${item.id}` })
}
},
_downloadFile (item) {
downloadFile('resources/download', {
id: item.id
})
},
_rtSize (val) {
return bytesToSize(parseInt(val))
},
_delete (item, i) {
this.deleteResource({
id: item.id
}).then(res => {
this.$emit('on-update')
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_rename (item, i) {
this.item = item
this.index = i
this.renameDialog = true
},
onUpDate (item) {
this.$set(this.list, this.index, item)
this.renameDialog = false
},
close () {
this.renameDialog = false
},
    // A file can be edited online only when its extension is in the supported
    // type whitelist and its size is under 1 MB; returns true to disable editing.
    _rtDisb ({ alias, size }) {
      let i = alias.lastIndexOf('.')
      let a = alias.substring(i, alias.length)
      let flag = _.includes(filtTypeArr, _.trimStart(a, '.')) && size < 1000000
      return !flag
    }
},
watch: {
fileResourcesList (a) {
this.list = []
setTimeout(() => {
this.list = a
})
}
},
created () {
},
mounted () {
this.list = this.fileResourcesList
},
components: { mRename }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,428 | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage
![image](https://user-images.githubusercontent.com/42087586/104288645-0ace2400-54f3-11eb-9466-04cb435d5fd0.png)
![image](https://user-images.githubusercontent.com/42087586/104288766-3224f100-54f3-11eb-9a9f-82d082a45670.png)
| https://github.com/apache/dolphinscheduler/issues/4428 | https://github.com/apache/dolphinscheduler/pull/4442 | 2a98aee1bd36039be1fd0ef03488432c70b6c63a | d0ac87a6389c2ff8766f477acaf3e236e11de7b1 | "2021-01-12T08:28:56Z" | java | "2021-01-14T09:10:14Z" | dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': 'User Name',
'Please enter user name': 'Please enter user name',
Password: 'Password',
'Please enter your password': 'Please enter your password',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': 'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22',
Login: 'Login',
Home: 'Home',
'Failed to create node to save': 'Failed to create node to save',
'Global parameters': 'Global parameters',
'Local parameters': 'Local parameters',
'Copy success': 'Copy success',
'The browser does not support automatic copying': 'The browser does not support automatic copying',
'Whether to save the DAG graph': 'Whether to save the DAG graph',
'Current node settings': 'Current node settings',
'View history': 'View history',
'View log': 'View log',
'Force success': 'Force success',
'Enter this child node': 'Enter this child node',
'Node name': 'Node name',
'Please enter name (required)': 'Please enter name (required)',
'Run flag': 'Run flag',
Normal: 'Normal',
'Prohibition execution': 'Prohibition execution',
'Please enter description': 'Please enter description',
'Number of failed retries': 'Number of failed retries',
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
'Delay execution time': 'Delay execution time',
'Delay execution': 'Delay execution',
'Forced success': 'Forced success',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',
'The task has not been executed and cannot enter the sub-Process': 'The task has not been executed and cannot enter the sub-Process',
'Name already exists': 'Name already exists',
'Download Log': 'Download Log',
'Refresh Log': 'Refresh Log',
'Enter full screen': 'Enter full screen',
'Cancel full screen': 'Cancel full screen',
Close: 'Close',
'Update log success': 'Update log success',
'No more logs': 'No more logs',
'No log': 'No log',
'Loading Log...': 'Loading Log...',
'Set the DAG diagram name': 'Set the DAG diagram name',
'Please enter description(optional)': 'Please enter description(optional)',
'Set global': 'Set global',
'Whether to update the process definition': 'Whether to update the process definition',
Add: 'Add',
'DAG graph name cannot be empty': 'DAG graph name cannot be empty',
'Create Datasource': 'Create Datasource',
'Project Home': 'Project Home',
'Project Manage': 'Project',
'Create Project': 'Create Project',
'Cron Manage': 'Cron Manage',
'Copy Workflow': 'Copy Workflow',
'Tenant Manage': 'Tenant Manage',
'Create Tenant': 'Create Tenant',
'User Manage': 'User Manage',
'Create User': 'Create User',
'User Information': 'User Information',
'Edit Password': 'Edit Password',
success: 'success',
failed: 'failed',
delete: 'delete',
'Please choose': 'Please choose',
'Please enter a positive integer': 'Please enter a positive integer',
'Program Type': 'Program Type',
'Main class': 'Main class',
'Main jar package': 'Main jar package',
'Please enter main jar package': 'Please enter main jar package',
'Please enter main class': 'Please enter main class',
'Command-line parameters': 'Command-line parameters',
'Please enter Command-line parameters': 'Please enter Command-line parameters',
'Other parameters': 'Other parameters',
'Please enter other parameters': 'Please enter other parameters',
Resources: 'Resources',
'Custom Parameters': 'Custom Parameters',
'Custom template': 'Custom template',
Datasource: 'Datasource',
methods: 'methods',
'Please enter method(optional)': 'Please enter method(optional)',
Script: 'Script',
'Please enter script(required)': 'Please enter script(required)',
'Deploy Mode': 'Deploy Mode',
'Driver cores': 'Driver cores',
'Please enter Driver cores': 'Please enter Driver cores',
'Driver memory': 'Driver memory',
'Please enter Driver memory': 'Please enter Driver memory',
'Executor Number': 'Executor Number',
'Please enter Executor number': 'Please enter Executor number',
'Executor memory': 'Executor memory',
'Please enter Executor memory': 'Please enter Executor memory',
'Executor cores': 'Executor cores',
'Please enter Executor cores': 'Please enter Executor cores',
'The Executor Number should be a positive integer': 'The Executor Number should be a positive integer',
'Memory should be a positive integer': 'Memory should be a positive integer',
'Core number should be positive integer': 'Core number should be positive integer',
'Please enter JobManager memory': 'Please enter JobManager memory',
'Please enter TaskManager memory': 'Please enter TaskManager memory',
'Please enter Slot number': 'Please enter Slot number',
'Please enter TaskManager number': 'Please enter TaskManager number',
'Please enter app name(optional)': 'Please enter app name(optional)',
'SQL Type': 'SQL Type',
Title: 'Title',
'Please enter the title of email': 'Please enter the title of email',
Table: 'Table',
TableMode: 'Table',
Attachment: 'Attachment',
'SQL Parameter': 'SQL Parameter',
'SQL Statement': 'SQL Statement',
'UDF Function': 'UDF Function',
'Please enter a SQL Statement(required)': 'Please enter a SQL Statement(required)',
'Please enter a JSON Statement(required)': 'Please enter a JSON Statement(required)',
'One form or attachment must be selected': 'One form or attachment must be selected',
'Recipient required': 'Recipient required',
'Mail subject required': 'Mail subject required',
'Child Node': 'Child Node',
'Please select a sub-Process': 'Please select a sub-Process',
Edit: 'Edit',
'Switch To This Version': 'Switch To This Version',
'Datasource Name': 'Datasource Name',
'Please enter datasource name': 'Please enter datasource name',
IP: 'IP',
'Please enter IP': 'Please enter IP',
Port: 'Port',
'Please enter port': 'Please enter port',
'Database Name': 'Database Name',
'Please enter database name': 'Please enter database name',
'Oracle Connect Type': 'ServiceName or SID',
'Oracle Service Name': 'ServiceName',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc connect parameters',
'Test Connect': 'Test Connect',
'Please enter resource name': 'Please enter resource name',
'Please enter resource folder name': 'Please enter resource folder name',
'Please enter a non-query SQL statement': 'Please enter a non-query SQL statement',
'Please enter IP/hostname': 'Please enter IP/hostname',
'jdbc connection parameters is not a correct JSON format': 'jdbc connection parameters is not a correct JSON format',
'#': '#',
'Datasource Type': 'Datasource Type',
'Datasource Parameter': 'Datasource Parameter',
'Create Time': 'Create Time',
'Update Time': 'Update Time',
Operation: 'Operation',
'Current Version': 'Current Version',
'Click to view': 'Click to view',
'Delete?': 'Delete?',
'Switch Version Successfully': 'Switch Version Successfully',
'Confirm Switch To This Version?': 'Confirm Switch To This Version?',
Confirm: 'Confirm',
'Task status statistics': 'Task Status Statistics',
Number: 'Number',
State: 'State',
'Process Status Statistics': 'Process Status Statistics',
'Process Definition Statistics': 'Process Definition Statistics',
'Project Name': 'Project Name',
'Please enter name': 'Please enter name',
'Owned Users': 'Owned Users',
'Process Pid': 'Process Pid',
'Zk registration directory': 'Zk registration directory',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': 'Last heartbeat time',
'Edit Tenant': 'Edit Tenant',
'OS Tenant Code': 'OS Tenant Code',
Queue: 'Yarn Queue',
'Tenant Name': 'Tenant Name',
'Please select a queue': 'default is tenant association queue',
'Please enter the os tenant code in English': 'Please enter the os tenant code in English',
'Please enter os tenant code in English': 'Please enter os tenant code in English',
'Please enter os tenant code': 'Please enter os tenant code',
'Please enter tenant Name': 'Please enter tenant Name',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': 'The os tenant code. Only letters or a combination of letters and numbers are allowed',
'The os tenant code cannot be all numbers': 'The os tenant code cannot be all numbers',
'Edit User': 'Edit User',
Tenant: 'Tenant',
Email: 'Email',
Phone: 'Phone',
'User Type': 'User Type',
'Please enter phone number': 'Please enter phone number',
'Please enter email': 'Please enter email',
'Please enter the correct email format': 'Please enter the correct email format',
'Please enter the correct mobile phone format': 'Please enter the correct mobile phone format',
Project: 'Project',
Authorize: 'Authorize',
'File resources': 'File resources',
'UDF resources': 'UDF resources',
'UDF resources directory': 'UDF resources directory',
'Please select UDF resources directory': 'Please select UDF resources directory',
'Edit alarm group': 'Edit alarm group',
'Create alarm group': 'Create alarm group',
'Group Name': 'Group Name',
'Please enter group name': 'Please enter group name',
'Group Type': 'Group Type',
Remarks: 'Remarks',
SMS: 'SMS',
'Managing Users': 'Managing Users',
Permission: 'Permission',
Administrator: 'Administrator',
'Confirm Password': 'Confirm Password',
'Please enter confirm password': 'Please enter confirm password',
'Password cannot be in Chinese': 'Password cannot be in Chinese',
'Please enter a password (6-22) character password': 'Please enter a password (6-22) character password',
'Confirmation password cannot be in Chinese': 'Confirmation password cannot be in Chinese',
'Please enter a confirmation password (6-22) character password': 'Please enter a confirmation password (6-22) character password',
'The password is inconsistent with the confirmation password': 'The password is inconsistent with the confirmation password',
'Please select the datasource': 'Please select the datasource',
'Please select resources': 'Please select resources',
Query: 'Query',
'Non Query': 'Non Query',
'prop(required)': 'prop(required)',
'value(optional)': 'value(optional)',
'value(required)': 'value(required)',
'prop is empty': 'prop is empty',
'value is empty': 'value is empty',
'prop is repeat': 'prop is repeat',
'Start Time': 'Start Time',
'End Time': 'End Time',
crontab: 'crontab',
'Failure Strategy': 'Failure Strategy',
online: 'online',
offline: 'offline',
'Task Status': 'Task Status',
'Process Instance': 'Process Instance',
'Task Instance': 'Task Instance',
'Select date range': 'Select date range',
startDate: 'startDate',
endDate: 'endDate',
Date: 'Date',
waiting: 'waiting',
execution: 'execution',
finish: 'finish',
'Create File': 'Create File',
'Create folder': 'Create folder',
'File Name': 'File Name',
'Folder Name': 'Folder Name',
'File Format': 'File Format',
'Folder Format': 'Folder Format',
'File Content': 'File Content',
'Upload File Size': 'Upload File size cannot exceed 1g',
Create: 'Create',
'Please enter the resource content': 'Please enter the resource content',
'Resource content cannot exceed 3000 lines': 'Resource content cannot exceed 3000 lines',
'File Details': 'File Details',
'Download Details': 'Download Details',
Return: 'Return',
Save: 'Save',
'File Manage': 'File Manage',
'Upload Files': 'Upload Files',
'Create UDF Function': 'Create UDF Function',
'Upload UDF Resources': 'Upload UDF Resources',
'Service-Master': 'Service-Master',
'Service-Worker': 'Service-Worker',
'Process Name': 'Process Name',
Executor: 'Executor',
'Run Type': 'Run Type',
'Scheduling Time': 'Scheduling Time',
'Run Times': 'Run Times',
host: 'host',
'fault-tolerant sign': 'fault-tolerant sign',
Rerun: 'Rerun',
'Recovery Failed': 'Recovery Failed',
Stop: 'Stop',
Pause: 'Pause',
'Recovery Suspend': 'Recovery Suspend',
Gantt: 'Gantt',
Name: 'Name',
'Node Type': 'Node Type',
'Submit Time': 'Submit Time',
Duration: 'Duration',
'Retry Count': 'Retry Count',
'Task Name': 'Task Name',
'Task Date': 'Task Date',
'Source Table': 'Source Table',
'Record Number': 'Record Number',
'Target Table': 'Target Table',
'Online viewing type is not supported': 'Online viewing type is not supported',
Size: 'Size',
Rename: 'Rename',
Download: 'Download',
Export: 'Export',
'Version Info': 'Version Info',
Submit: 'Submit',
'Edit UDF Function': 'Edit UDF Function',
type: 'type',
'UDF Function Name': 'UDF Function Name',
FILE: 'FILE',
UDF: 'UDF',
'File Subdirectory': 'File Subdirectory',
'Please enter a function name': 'Please enter a function name',
'Package Name': 'Package Name',
'Please enter a Package name': 'Please enter a Package name',
Parameter: 'Parameter',
'Please enter a parameter': 'Please enter a parameter',
'UDF Resources': 'UDF Resources',
'Upload Resources': 'Upload Resources',
Instructions: 'Instructions',
'Please enter a instructions': 'Please enter a instructions',
'Please enter a UDF function name': 'Please enter a UDF function name',
'Select UDF Resources': 'Select UDF Resources',
'Class Name': 'Class Name',
'Jar Package': 'Jar Package',
'Library Name': 'Library Name',
'UDF Resource Name': 'UDF Resource Name',
'File Size': 'File Size',
Description: 'Description',
'Drag Nodes and Selected Items': 'Drag Nodes and Selected Items',
'Select Line Connection': 'Select Line Connection',
'Delete selected lines or nodes': 'Delete selected lines or nodes',
'Full Screen': 'Full Screen',
Unpublished: 'Unpublished',
'Start Process': 'Start Process',
'Execute from the current node': 'Execute from the current node',
'Recover tolerance fault process': 'Recover tolerance fault process',
'Resume the suspension process': 'Resume the suspension process',
'Execute from the failed nodes': 'Execute from the failed nodes',
'Complement Data': 'Complement Data',
'Scheduling execution': 'Scheduling execution',
'Recovery waiting thread': 'Recovery waiting thread',
'Submitted successfully': 'Submitted successfully',
Executing: 'Executing',
'Ready to pause': 'Ready to pause',
'Ready to stop': 'Ready to stop',
'Need fault tolerance': 'Need fault tolerance',
kill: 'kill',
'Waiting for thread': 'Waiting for thread',
'Waiting for dependence': 'Waiting for dependence',
Start: 'Start',
Copy: 'Copy',
'Copy name': 'Copy name',
Delete: 'Delete',
'Please enter keyword': 'Please enter keyword',
'File Upload': 'File Upload',
'Drag the file into the current upload window': 'Drag the file into the current upload window',
'Drag area upload': 'Drag area upload',
Upload: 'Upload',
'ReUpload File': 'Re-upload file',
'Please enter file name': 'Please enter file name',
'Please select the file to upload': 'Please select the file to upload',
'Resources manage': 'Resources',
Security: 'Security',
Logout: 'Logout',
'No data': 'No data',
'Uploading...': 'Uploading...',
'Loading...': 'Loading...',
List: 'List',
'Unable to download without proper url': 'Unable to download without proper url',
Process: 'Process',
'Process definition': 'Process definition',
'Task record': 'Task record',
'Warning group manage': 'Warning group manage',
'Servers manage': 'Servers manage',
'UDF manage': 'UDF manage',
'Resource manage': 'Resource manage',
'Function manage': 'Function manage',
'Edit password': 'Edit password',
'Ordinary users': 'Ordinary users',
'Create process': 'Create process',
'Import process': 'Import process',
'Timing state': 'Timing state',
Timing: 'Timing',
TreeView: 'TreeView',
'Mailbox already exists! Recipients and copyers cannot repeat': 'Mailbox already exists! Recipients and copyers cannot repeat',
'Mailbox input is illegal': 'Mailbox input is illegal',
'Please set the parameters before starting': 'Please set the parameters before starting',
Continue: 'Continue',
End: 'End',
'Node execution': 'Node execution',
'Backward execution': 'Backward execution',
'Forward execution': 'Forward execution',
'Execute only the current node': 'Execute only the current node',
'Notification strategy': 'Notification strategy',
'Notification group': 'Notification group',
'Please select a notification group': 'Please select a notification group',
Recipient: 'Recipient',
Cc: 'Cc',
'Whether it is a complement process?': 'Whether it is a complement process?',
'Schedule date': 'Schedule date',
'Mode of execution': 'Mode of execution',
'Serial execution': 'Serial execution',
'Parallel execution': 'Parallel execution',
'Set parameters before timing': 'Set parameters before timing',
'Start and stop time': 'Start and stop time',
'Please select time': 'Please select time',
'Please enter crontab': 'Please enter crontab',
none_1: 'none',
success_1: 'success',
failure_1: 'failure',
All_1: 'All',
Toolbar: 'Toolbar',
'View variables': 'View variables',
'Format DAG': 'Format DAG',
'Refresh DAG status': 'Refresh DAG status',
Return_1: 'Return',
'Please enter format': 'Please enter format',
'connection parameter': 'connection parameter',
'Process definition details': 'Process definition details',
'Create process definition': 'Create process definition',
'Scheduled task list': 'Scheduled task list',
'Process instance details': 'Process instance details',
'Create Resource': 'Create Resource',
'User Center': 'User Center',
'Please enter method': 'Please enter method',
none: 'none',
name: 'name',
'Process priority': 'Process priority',
'Task priority': 'Task priority',
'Task timeout alarm': 'Task timeout alarm',
'Timeout strategy': 'Timeout strategy',
'Timeout alarm': 'Timeout alarm',
'Timeout failure': 'Timeout failure',
'Timeout period': 'Timeout period',
'Waiting Dependent complete': 'Waiting Dependent complete',
'Waiting Dependent start': 'Waiting Dependent start',
'Check interval': 'Check interval',
'Timeout must be longer than check interval': 'Timeout must be longer than check interval',
'Timeout strategy must be selected': 'Timeout strategy must be selected',
'Timeout must be a positive integer': 'Timeout must be a positive integer',
'Add dependency': 'Add dependency',
and: 'and',
or: 'or',
month: 'month',
week: 'week',
day: 'day',
hour: 'hour',
Running: 'Running',
'Waiting for dependency to complete': 'Waiting for dependency to complete',
Selected: 'Selected',
CurrentHour: 'CurrentHour',
Last1Hour: 'Last1Hour',
Last2Hours: 'Last2Hours',
Last3Hours: 'Last3Hours',
Last24Hours: 'Last24Hours',
today: 'today',
Last1Days: 'Last1Days',
Last2Days: 'Last2Days',
Last3Days: 'Last3Days',
Last7Days: 'Last7Days',
ThisWeek: 'ThisWeek',
LastWeek: 'LastWeek',
LastMonday: 'LastMonday',
LastTuesday: 'LastTuesday',
LastWednesday: 'LastWednesday',
LastThursday: 'LastThursday',
LastFriday: 'LastFriday',
LastSaturday: 'LastSaturday',
LastSunday: 'LastSunday',
ThisMonth: 'ThisMonth',
LastMonth: 'LastMonth',
LastMonthBegin: 'LastMonthBegin',
LastMonthEnd: 'LastMonthEnd',
'Refresh status succeeded': 'Refresh status succeeded',
'Queue manage': 'Yarn Queue manage',
'Create queue': 'Create queue',
'Edit queue': 'Edit queue',
'Datasource manage': 'Datasource',
'History task record': 'History task record',
'Please go online': 'Please go online',
'Queue value': 'Queue value',
'Please enter queue value': 'Please enter queue value',
'Worker group manage': 'Worker group manage',
'Create worker group': 'Create worker group',
'Edit worker group': 'Edit worker group',
'Token manage': 'Token manage',
'Create token': 'Create token',
'Edit token': 'Edit token',
'Please enter the IP address separated by commas': 'Please enter the IP address separated by commas',
'Note: Multiple IP addresses have been comma separated': 'Note: Multiple IP addresses have been comma separated',
'Failure time': 'Failure time',
'Expiration time': 'Expiration time',
User: 'User',
'Please enter token': 'Please enter token',
'Generate token': 'Generate token',
Monitor: 'Monitor',
Group: 'Group',
'Queue statistics': 'Queue statistics',
'Command status statistics': 'Command status statistics',
'Task kill': 'Task Kill',
'Task queue': 'Task queue',
'Error command count': 'Error command count',
'Normal command count': 'Normal command count',
Manage: ' Manage',
'Number of connections': 'Number of connections',
Sent: 'Sent',
Received: 'Received',
'Min latency': 'Min latency',
'Avg latency': 'Avg latency',
'Max latency': 'Max latency',
'Node count': 'Node count',
'Query time': 'Query time',
'Node self-test status': 'Node self-test status',
'Health status': 'Health status',
'Max connections': 'Max connections',
'Threads connections': 'Threads connections',
'Max used connections': 'Max used connections',
'Threads running connections': 'Threads running connections',
'Worker group': 'Worker group',
'Please enter a positive integer greater than 0': 'Please enter a positive integer greater than 0',
'Pre Statement': 'Pre Statement',
'Post Statement': 'Post Statement',
'Statement cannot be empty': 'Statement cannot be empty',
'Process Define Count': 'Work flow Define Count',
'Process Instance Running Count': 'Process Instance Running Count',
'command number of waiting for running': 'command number of waiting for running',
'failure command number': 'failure command number',
'tasks number of waiting running': 'tasks number of waiting running',
'task number of ready to kill': 'task number of ready to kill',
'Statistics manage': 'Statistics Manage',
statistics: 'Statistics',
'select tenant': 'select tenant',
'Please enter Principal': 'Please enter Principal',
'The start time must not be the same as the end': 'The start time must not be the same as the end',
'Startup parameter': 'Startup parameter',
'Startup type': 'Startup type',
'warning of timeout': 'warning of timeout',
'Next five execution times': 'Next five execution times',
'Execute time': 'Execute time',
'Complement range': 'Complement range',
'Flink Version': 'Flink Version',
'Slot Number': 'Slot Number',
'TaskManager Number': 'TaskManager Number',
'JobManager Memory': 'JobManager Memory',
'TaskManager Memory': 'TaskManager Memory',
'App Name': 'App Name',
'Http Url': 'Http Url',
'Http Method': 'Http Method',
'Http Parameters': 'Http Parameters',
'Http Parameters Key': 'Http Parameters Key',
'Http Parameters Position': 'Http Parameters Position',
'Http Parameters Value': 'Http Parameters Value',
'Http Check Condition': 'Http Check Condition',
'Http Condition': 'Http Condition',
'Please Enter Http Url': 'Please Enter Http Url(required)',
'Please Enter Http Condition': 'Please Enter Http Condition',
'There is no data for this period of time': 'There is no data for this period of time',
'IP address cannot be empty': 'IP address cannot be empty',
'Please enter the correct IP': 'Please enter the correct IP',
'Please generate token': 'Please generate token',
'Spark Version': 'Spark Version',
TargetDataBase: 'target database',
TargetTable: 'target table',
'Please enter the table of target': 'Please enter the table of target',
'Please enter a Target Table(required)': 'Please enter a Target Table(required)',
SpeedByte: 'speed(byte count)',
SpeedRecord: 'speed(record count)',
'0 means unlimited by byte': '0 means unlimited',
'0 means unlimited by count': '0 means unlimited',
'Modify User': 'Modify User',
'Whether directory': 'Whether directory',
Yes: 'Yes',
No: 'No',
'Hadoop Custom Params': 'Hadoop Params',
'Sqoop Advanced Parameters': 'Sqoop Params',
'Sqoop Job Name': 'Job Name',
'Please enter Mysql Database(required)': 'Please enter Mysql Database(required)',
'Please enter Mysql Table(required)': 'Please enter Mysql Table(required)',
'Please enter Columns (Comma separated)': 'Please enter Columns (Comma separated)',
'Please enter Target Dir(required)': 'Please enter Target Dir(required)',
'Please enter Export Dir(required)': 'Please enter Export Dir(required)',
'Please enter Hive Database(required)': 'Please enter Hive Database(required)',
'Please enter Hive Table(required)': 'Please enter Hive Table(required)',
'Please enter Hive Partition Keys': 'Please enter Hive Partition Keys',
'Please enter Hive Partition Values': 'Please enter Hive Partition Values',
'Please enter Replace Delimiter': 'Please enter Replace Delimiter',
'Please enter Fields Terminated': 'Please enter Fields Terminated',
'Please enter Lines Terminated': 'Please enter Lines Terminated',
'Please enter Concurrency': 'Please enter Concurrency',
'Please enter Update Key': 'Please enter Update Key',
'Please enter Job Name(required)': 'Please enter Job Name(required)',
'Please enter Custom Shell(required)': 'Please enter Custom Shell(required)',
Direct: 'Direct',
Type: 'Type',
ModelType: 'ModelType',
ColumnType: 'ColumnType',
Database: 'Database',
Column: 'Column',
'Map Column Hive': 'Map Column Hive',
'Map Column Java': 'Map Column Java',
'Export Dir': 'Export Dir',
'Hive partition Keys': 'Hive partition Keys',
'Hive partition Values': 'Hive partition Values',
FieldsTerminated: 'FieldsTerminated',
LinesTerminated: 'LinesTerminated',
IsUpdate: 'IsUpdate',
UpdateKey: 'UpdateKey',
UpdateMode: 'UpdateMode',
'Target Dir': 'Target Dir',
DeleteTargetDir: 'DeleteTargetDir',
FileType: 'FileType',
CompressionCodec: 'CompressionCodec',
CreateHiveTable: 'CreateHiveTable',
DropDelimiter: 'DropDelimiter',
OverWriteSrc: 'OverWriteSrc',
ReplaceDelimiter: 'ReplaceDelimiter',
Concurrency: 'Concurrency',
Form: 'Form',
OnlyUpdate: 'OnlyUpdate',
AllowInsert: 'AllowInsert',
'Data Source': 'Data Source',
'Data Target': 'Data Target',
'All Columns': 'All Columns',
'Some Columns': 'Some Columns',
'Branch flow': 'Branch flow',
'Custom Job': 'Custom Job',
'Custom Script': 'Custom Script',
'Cannot select the same node for successful branch flow and failed branch flow': 'Cannot select the same node for successful branch flow and failed branch flow',
'Successful branch flow and failed branch flow are required': 'Successful and failed branch flows are required for the conditions node',
'No resources exist': 'No resources exist',
'Please delete all non-existing resources': 'Please delete all non-existing resources',
'Unauthorized or deleted resources': 'Unauthorized or deleted resources',
'Please delete all non-existent resources': 'Please delete all non-existent resources',
Kinship: 'Workflow relationship',
Reset: 'Reset',
KinshipStateActive: 'Active',
KinshipState1: 'Online',
KinshipState0: 'Workflow is not online',
KinshipState10: 'Scheduling is not online',
'Dag label display control': 'Dag label display control',
Enable: 'Enable',
Disable: 'Disable',
'The Worker group no longer exists, please select the correct Worker group!': 'The Worker group no longer exists, please select the correct Worker group!',
'Please confirm whether the workflow has been saved before downloading': 'Please confirm whether the workflow has been saved before downloading',
'User name length is between 3 and 39': 'User name length is between 3 and 39',
'Timeout Settings': 'Timeout Settings',
'Connect Timeout': 'Connect Timeout',
'Socket Timeout': 'Socket Timeout',
'Connect timeout be a positive integer': 'Connect timeout must be a positive integer',
'Socket Timeout be a positive integer': 'Socket timeout must be a positive integer',
ms: 'ms',
'Please Enter Url': 'Please Enter Url eg. 127.0.0.1:7077',
Master: 'Master',
'Please select the waterdrop resources': 'Please select the waterdrop resources',
zkDirectory: 'zkDirectory',
'Directory detail': 'Directory detail',
'Connection name': 'Connection name',
'Current connection settings': 'Current connection settings',
'Please save the DAG before formatting': 'Please save the DAG before formatting',
'Batch copy': 'Batch copy',
'Related items': 'Related items',
'Project name is required': 'Project name is required',
'Batch move': 'Batch move',
Version: 'Version',
'Pre tasks': 'Pre tasks',
'Running Memory': 'Running Memory',
'Max Memory': 'Max Memory',
'Min Memory': 'Min Memory',
'The workflow canvas is abnormal and cannot be saved, please recreate': 'The workflow canvas is abnormal and cannot be saved, please recreate',
Info: 'Info'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,428 | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage | It is recommended to add an owner in the data source center and the Resource Center to make it easier to manage
![image](https://user-images.githubusercontent.com/42087586/104288645-0ace2400-54f3-11eb-9466-04cb435d5fd0.png)
![image](https://user-images.githubusercontent.com/42087586/104288766-3224f100-54f3-11eb-9a9f-82d082a45670.png)
| https://github.com/apache/dolphinscheduler/issues/4428 | https://github.com/apache/dolphinscheduler/pull/4442 | 2a98aee1bd36039be1fd0ef03488432c70b6c63a | d0ac87a6389c2ff8766f477acaf3e236e11de7b1 | "2021-01-12T08:28:56Z" | java | "2021-01-14T09:10:14Z" | dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export default {
'User Name': '用户名',
'Please enter user name': '请输入用户名',
Password: '密码',
'Please enter your password': '请输入密码',
'Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22': '密码至少包含数字,字母和字符的两种组合,长度在6-22之间',
Login: '登录',
Home: '首页',
'Failed to create node to save': '未创建节点保存失败',
'Global parameters': '全局参数',
'Local parameters': '局部参数',
'Copy success': '复制成功',
'The browser does not support automatic copying': '该浏览器不支持自动复制',
'Whether to save the DAG graph': '是否保存DAG图',
'Current node settings': '当前节点设置',
'View history': '查看历史',
'View log': '查看日志',
'Force success': '强制成功',
'Enter this child node': '进入该子节点',
'Node name': '节点名称',
'Please enter name (required)': '请输入名称(必填)',
'Run flag': '运行标志',
Normal: '正常',
'Prohibition execution': '禁止执行',
'Please enter description': '请输入描述',
'Number of failed retries': '失败重试次数',
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
'Delay execution time': '延时执行时间',
'Delay execution': '延时执行',
'Forced success': '强制成功',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行,不能进入子工作流',
'The task has not been executed and cannot enter the sub-Process': '该任务还未执行,不能进入子工作流',
'Name already exists': '名称已存在请重新输入',
'Download Log': '下载日志',
'Refresh Log': '刷新日志',
'Enter full screen': '进入全屏',
'Cancel full screen': '取消全屏',
Close: '关闭',
'Update log success': '更新日志成功',
'No more logs': '暂无更多日志',
'No log': '暂无日志',
'Loading Log...': '正在努力请求日志中...',
'Set the DAG diagram name': '设置DAG图名称',
'Please enter description(optional)': '请输入描述(选填)',
'Set global': '设置全局',
'Whether to update the process definition': '是否更新流程定义',
Add: '添加',
'DAG graph name cannot be empty': 'DAG图名称不能为空',
'Create Datasource': '创建数据源',
'Project Home': '项目首页',
'Project Manage': '项目管理',
'Create Project': '创建项目',
'Cron Manage': '定时管理',
'Copy Workflow': '复制工作流',
'Tenant Manage': '租户管理',
'Create Tenant': '创建租户',
'User Manage': '用户管理',
'Create User': '创建用户',
'User Information': '用户信息',
'Edit Password': '密码修改',
success: '成功',
failed: '失败',
delete: '删除',
'Please choose': '请选择',
'Please enter a positive integer': '请输入正整数',
'Program Type': '程序类型',
'Main class': '主函数的class',
'Main jar package': '主jar包',
'Please enter main jar package': '请选择主jar包',
'Please enter main class': '请填写主函数的class',
'Command-line parameters': '命令行参数',
'Please enter Command-line parameters': '请输入命令行参数',
'Other parameters': '其他参数',
'Please enter other parameters': '请输入其他参数',
Resources: '资源',
'Custom Parameters': '自定义参数',
'Custom template': '自定义模版',
Datasource: '数据源',
methods: '方法',
'Please enter method(optional)': '请输入方法(选填)',
Script: '脚本',
'Please enter script(required)': '请输入脚本(必填)',
'Deploy Mode': '部署方式',
'Driver cores': 'Driver内核数',
'Please enter Driver cores': '请输入Driver内核数',
'Driver memory': 'Driver内存数',
'Please enter Driver memory': '请输入Driver内存数',
'Executor Number': 'Executor数量',
'Please enter Executor number': '请输入Executor数量',
'Executor memory': 'Executor内存数',
'Please enter Executor memory': '请输入Executor内存数',
'Executor cores': 'Executor内核数',
'Please enter Executor cores': '请输入Executor内核数',
'The Executor Number should be a positive integer': 'Executor数量为正整数',
'Memory should be a positive integer': '内存数为数字',
'Core number should be positive integer': '内核数为正整数',
'Please enter JobManager memory': '请输入JobManager内存数',
'Please enter TaskManager memory': '请输入TaskManager内存数',
'Please enter Slot number': '请输入Slot数量',
'Please enter TaskManager number': '请输入TaskManager数量',
'Please enter app name(optional)': '请输入任务名称(选填)',
'SQL Type': 'sql类型',
Title: '主题',
'Please enter the title of email': '请输入邮件主题',
Table: '表名',
TableMode: '表格',
Attachment: '附件',
'SQL Parameter': 'sql参数',
'SQL Statement': 'sql语句',
'UDF Function': 'UDF函数',
'Please enter a SQL Statement(required)': '请输入sql语句(必填)',
'Please enter a JSON Statement(required)': '请输入json语句(必填)',
'One form or attachment must be selected': '表格、附件必须勾选一个',
'Recipient required': '收件人邮箱必填',
'Mail subject required': '邮件主题必填',
'Child Node': '子节点',
'Please select a sub-Process': '请选择子工作流',
Edit: '编辑',
'Switch To This Version': '切换到该版本',
'Datasource Name': '数据源名称',
'Please enter datasource name': '请输入数据源名称',
IP: 'IP主机名',
'Please enter IP': '请输入IP主机名',
Port: '端口',
'Please enter port': '请输入端口',
'Database Name': '数据库名',
'Please enter database name': '请输入数据库名',
'Oracle Connect Type': '服务名或SID',
'Oracle Service Name': '服务名',
'Oracle SID': 'SID',
'jdbc connect parameters': 'jdbc连接参数',
'Test Connect': '测试连接',
'Please enter resource name': '请输入资源名称',
'Please enter resource folder name': '请输入资源文件夹名称',
'Please enter a non-query SQL statement': '请输入非查询sql语句',
'Please enter IP/hostname': '请输入IP/主机名',
'jdbc connection parameters is not a correct JSON format': 'jdbc连接参数不是一个正确的JSON格式',
'#': '编号',
'Datasource Type': '数据源类型',
'Datasource Parameter': '数据源参数',
'Create Time': '创建时间',
'Update Time': '更新时间',
Operation: '操作',
'Current Version': '当前版本',
'Click to view': '点击查看',
'Delete?': '确定删除吗?',
'Switch Version Successfully': '切换版本成功',
'Confirm Switch To This Version?': '确定切换到该版本吗?',
Confirm: '确定',
'Task status statistics': '任务状态统计',
Number: '数量',
State: '状态',
'Process Status Statistics': '流程状态统计',
'Process Definition Statistics': '流程定义统计',
'Project Name': '项目名称',
'Please enter name': '请输入名称',
'Owned Users': '所属用户',
'Process Pid': '进程Pid',
'Zk registration directory': 'zk注册目录',
cpuUsage: 'cpuUsage',
memoryUsage: 'memoryUsage',
'Last heartbeat time': '最后心跳时间',
'Edit Tenant': '编辑租户',
'OS Tenant Code': '操作系统租户',
'Tenant Name': '租户名称',
Queue: '队列',
'Please select a queue': '默认为租户关联队列',
'Please enter the os tenant code in English': '请输入操作系统租户只允许英文',
'Please enter os tenant code in English': '请输入英文操作系统租户',
'Please enter os tenant code': '请输入操作系统租户',
'Please enter tenant Name': '请输入租户名称',
'The os tenant code. Only letters or a combination of letters and numbers are allowed': '操作系统租户只允许字母或字母与数字组合',
'Edit User': '编辑用户',
Tenant: '租户',
Email: '邮件',
Phone: '手机',
'User Type': '用户类型',
'Please enter phone number': '请输入手机',
'Please enter email': '请输入邮箱',
'Please enter the correct email format': '请输入正确的邮箱格式',
'Please enter the correct mobile phone format': '请输入正确的手机格式',
Project: '项目',
Authorize: '授权',
'File resources': '文件资源',
'UDF resources': 'UDF资源',
'UDF resources directory': 'UDF资源目录',
'Please select UDF resources directory': '请选择UDF资源目录',
'Edit alarm group': '编辑告警组',
'Create alarm group': '创建告警组',
'Group Name': '组名称',
'Please enter group name': '请输入组名称',
'Group Type': '组类型',
Remarks: '备注',
SMS: '短信',
'Managing Users': '管理用户',
Permission: '权限',
Administrator: '管理员',
'Confirm Password': '确认密码',
'Please enter confirm password': '请输入确认密码',
'Password cannot be in Chinese': '密码不能为中文',
'Please enter a password (6-22) character password': '请输入密码(6-22)字符密码',
'Confirmation password cannot be in Chinese': '确认密码不能为中文',
'Please enter a confirmation password (6-22) character password': '请输入确认密码(6-22)字符密码',
'The password is inconsistent with the confirmation password': '密码与确认密码不一致,请重新确认',
'Please select the datasource': '请选择数据源',
'Please select resources': '请选择资源',
Query: '查询',
'Non Query': '非查询',
'prop(required)': 'prop(必填)',
'value(optional)': 'value(选填)',
'value(required)': 'value(必填)',
'prop is empty': 'prop不能为空',
'value is empty': 'value不能为空',
'prop is repeat': 'prop中有重复',
'Start Time': '开始时间',
'End Time': '结束时间',
crontab: 'crontab',
'Failure Strategy': '失败策略',
online: '上线',
offline: '下线',
'Task Status': '任务状态',
'Process Instance': '工作流实例',
'Task Instance': '任务实例',
'Select date range': '选择日期区间',
startDate: '开始日期',
endDate: '结束日期',
Date: '日期',
waiting: '等待',
execution: '执行中',
finish: '完成',
'Create File': '创建文件',
'Create folder': '创建文件夹',
'File Name': '文件名称',
'Folder Name': '文件夹名称',
'File Format': '文件格式',
'Folder Format': '文件夹格式',
'File Content': '文件内容',
'Upload File Size': '文件大小不能超过1G',
Create: '创建',
'Please enter the resource content': '请输入资源内容',
'Resource content cannot exceed 3000 lines': '资源内容不能超过3000行',
'File Details': '文件详情',
'Download Details': '下载详情',
Return: '返回',
Save: '保存',
'File Manage': '文件管理',
'Upload Files': '上传文件',
'Create UDF Function': '创建UDF函数',
'Upload UDF Resources': '上传UDF资源',
'Service-Master': '服务管理-Master',
'Service-Worker': '服务管理-Worker',
'Process Name': '工作流名称',
Executor: '执行用户',
'Run Type': '运行类型',
'Scheduling Time': '调度时间',
'Run Times': '运行次数',
host: 'host',
'fault-tolerant sign': '容错标识',
Rerun: '重跑',
'Recovery Failed': '恢复失败',
Stop: '停止',
Pause: '暂停',
'Recovery Suspend': '恢复运行',
Gantt: '甘特图',
Name: '名称',
'Node Type': '节点类型',
'Submit Time': '提交时间',
Duration: '运行时长',
'Retry Count': '重试次数',
'Task Name': '任务名称',
'Task Date': '任务日期',
'Source Table': '源表',
'Record Number': '记录数',
'Target Table': '目标表',
'Online viewing type is not supported': '不支持在线查看类型',
Size: '大小',
Rename: '重命名',
Download: '下载',
Export: '导出',
'Version Info': '版本信息',
Submit: '提交',
'Edit UDF Function': '编辑UDF函数',
type: '类型',
'UDF Function Name': 'UDF函数名称',
FILE: '文件',
UDF: 'UDF',
'File Subdirectory': '文件子目录',
'Please enter a function name': '请输入函数名',
'Package Name': '包名类名',
'Please enter a Package name': '请输入包名类名',
Parameter: '参数',
'Please enter a parameter': '请输入参数',
'UDF Resources': 'UDF资源',
'Upload Resources': '上传资源',
Instructions: '使用说明',
'Please enter a instructions': '请输入使用说明',
'Please enter a UDF function name': '请输入UDF函数名称',
'Select UDF Resources': '请选择UDF资源',
'Class Name': '类名',
'Jar Package': 'jar包',
'Library Name': '库名',
'UDF Resource Name': 'UDF资源名称',
'File Size': '文件大小',
Description: '描述',
'Drag Nodes and Selected Items': '拖动节点和选中项',
'Select Line Connection': '选择线条连接',
'Delete selected lines or nodes': '删除选中的线或节点',
'Full Screen': '全屏',
Unpublished: '未发布',
'Start Process': '启动工作流',
'Execute from the current node': '从当前节点开始执行',
'Recover tolerance fault process': '恢复被容错的工作流',
'Resume the suspension process': '恢复运行流程',
'Execute from the failed nodes': '从失败节点开始执行',
'Complement Data': '补数',
'Scheduling execution': '调度执行',
'Recovery waiting thread': '恢复等待线程',
'Submitted successfully': '提交成功',
Executing: '正在执行',
'Ready to pause': '准备暂停',
'Ready to stop': '准备停止',
'Need fault tolerance': '需要容错',
kill: 'kill',
'Waiting for thread': '等待线程',
'Waiting for dependence': '等待依赖',
Start: '运行',
Copy: '复制节点',
'Copy name': '复制名称',
Delete: '删除',
'Please enter keyword': '请输入关键词',
'File Upload': '文件上传',
'Drag the file into the current upload window': '请将文件拖拽到当前上传窗口内!',
'Drag area upload': '拖动区域上传',
Upload: '上传',
'ReUpload File': '重新上传文件',
'Please enter file name': '请输入文件名',
'Please select the file to upload': '请选择要上传的文件',
'Resources manage': '资源中心',
Security: '安全中心',
Logout: '退出',
'No data': '查询无数据',
'Uploading...': '文件上传中',
'Loading...': '正在努力加载中...',
List: '列表',
'Unable to download without proper url': '无下载url无法下载',
Process: '工作流',
'Process definition': '工作流定义',
'Task record': '任务记录',
'Warning group manage': '告警组管理',
'Servers manage': '服务管理',
'UDF manage': 'UDF管理',
'Resource manage': '资源管理',
'Function manage': '函数管理',
'Edit password': '修改密码',
'Ordinary users': '普通用户',
'Create process': '创建工作流',
'Import process': '导入工作流',
'Timing state': '定时状态',
Timing: '定时',
TreeView: '树形图',
'Mailbox already exists! Recipients and copyers cannot repeat': '邮箱已存在!收件人和抄送人不能重复',
'Mailbox input is illegal': '邮箱输入不合法',
'Please set the parameters before starting': '启动前请先设置参数',
Continue: '继续',
End: '结束',
'Node execution': '节点执行',
'Backward execution': '向后执行',
'Forward execution': '向前执行',
'Execute only the current node': '仅执行当前节点',
'Notification strategy': '通知策略',
'Notification group': '通知组',
'Please select a notification group': '请选择通知组',
Recipient: '收件人',
Cc: '抄送人',
'Whether it is a complement process?': '是否补数',
'Schedule date': '调度日期',
'Mode of execution': '执行方式',
'Serial execution': '串行执行',
'Parallel execution': '并行执行',
'Set parameters before timing': '定时前请先设置参数',
'Start and stop time': '起止时间',
'Please select time': '请选择时间',
'Please enter crontab': '请输入crontab',
none_1: '都不发',
success_1: '成功发',
failure_1: '失败发',
All_1: '成功或失败都发',
Toolbar: '工具栏',
'View variables': '查看变量',
'Format DAG': '格式化DAG',
'Refresh DAG status': '刷新DAG状态',
Return_1: '返回上一节点',
'Please enter format': '请输入格式为',
'connection parameter': '连接参数',
'Process definition details': '流程定义详情',
'Create process definition': '创建流程定义',
'Scheduled task list': '定时任务列表',
'Process instance details': '流程实例详情',
'Create Resource': '创建资源',
'User Center': '用户中心',
'Please enter method': '请输入方法',
none: '无',
name: '名称',
'Process priority': '流程优先级',
'Task priority': '任务优先级',
'Task timeout alarm': '任务超时告警',
'Timeout strategy': '超时策略',
'Timeout alarm': '超时告警',
'Timeout failure': '超时失败',
'Timeout period': '超时时长',
'Waiting Dependent complete': '等待依赖完成',
'Waiting Dependent start': '等待依赖启动',
'Check interval': '检查间隔',
'Timeout must be longer than check interval': '超时时间必须比检查间隔长',
'Timeout strategy must be selected': '超时策略必须选一个',
'Timeout must be a positive integer': '超时时长必须为正整数',
'Add dependency': '添加依赖',
and: '且',
or: '或',
month: '月',
week: '周',
day: '日',
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',
Last2Hours: '前2小时',
Last3Hours: '前3小时',
Last24Hours: '前24小时',
today: '今天',
Last1Days: '昨天',
Last2Days: '前两天',
Last3Days: '前三天',
Last7Days: '前七天',
ThisWeek: '本周',
LastWeek: '上周',
LastMonday: '上周一',
LastTuesday: '上周二',
LastWednesday: '上周三',
LastThursday: '上周四',
LastFriday: '上周五',
LastSaturday: '上周六',
LastSunday: '上周日',
ThisMonth: '本月',
LastMonth: '上月',
LastMonthBegin: '上月初',
LastMonthEnd: '上月末',
'Refresh status succeeded': '刷新状态成功',
'Queue manage': 'Yarn 队列管理',
'Create queue': '创建队列',
'Edit queue': '编辑队列',
'Datasource manage': '数据源中心',
'History task record': '历史任务记录',
'Please go online': '不要忘记上线',
'Queue value': '队列值',
'Please enter queue value': '请输入队列值',
'Worker group manage': 'Worker分组管理',
'Create worker group': '创建Worker分组',
'Edit worker group': '编辑Worker分组',
'Token manage': '令牌管理',
'Create token': '创建令牌',
'Edit token': '编辑令牌',
'Please enter the IP address separated by commas': '请输入IP地址多个用英文逗号隔开',
'Note: Multiple IP addresses have been comma separated': '注意:多个IP地址以英文逗号分割',
'Failure time': '失效时间',
'Expiration time': '失效时间',
User: '用户',
'Please enter token': '请输入令牌',
'Generate token': '生成令牌',
Monitor: '监控中心',
Group: '分组',
'Queue statistics': '队列统计',
'Command status statistics': '命令状态统计',
'Task kill': '等待kill任务',
'Task queue': '等待执行任务',
'Error command count': '错误指令数',
'Normal command count': '正确指令数',
Manage: '管理',
'Number of connections': '连接数',
Sent: '发送量',
Received: '接收量',
'Min latency': '最低延时',
'Avg latency': '平均延时',
'Max latency': '最大延时',
'Node count': '节点数',
'Query time': '当前查询时间',
'Node self-test status': '节点自检状态',
'Health status': '健康状态',
'Max connections': '最大连接数',
'Threads connections': '当前连接数',
'Max used connections': '同时使用连接最大数',
'Threads running connections': '数据库当前活跃连接数',
'Worker group': 'Worker分组',
'Please enter a positive integer greater than 0': '请输入大于 0 的正整数',
'Pre Statement': '前置sql',
'Post Statement': '后置sql',
'Statement cannot be empty': '语句不能为空',
'Process Define Count': '工作流定义数',
'Process Instance Running Count': '正在运行的流程数',
'command number of waiting for running': '待执行的命令数',
'failure command number': '执行失败的命令数',
'tasks number of waiting running': '待运行任务数',
'task number of ready to kill': '待杀死任务数',
'Statistics manage': '统计管理',
statistics: '统计',
'select tenant': '选择租户',
'Please enter Principal': '请输入Principal',
'The start time must not be the same as the end': '开始时间和结束时间不能相同',
'Startup parameter': '启动参数',
'Startup type': '启动类型',
'warning of timeout': '超时告警',
'Next five execution times': '接下来五次执行时间',
'Execute time': '执行时间',
'Complement range': '补数范围',
'Flink Version': 'Flink版本',
'Slot Number': 'Slot数量',
'TaskManager Number': 'TaskManager数量',
'JobManager Memory': 'JobManager内存数',
'TaskManager Memory': 'TaskManager内存数',
'App Name': '任务名称',
'Http Url': '请求地址',
'Http Method': '请求类型',
'Http Parameters': '请求参数',
'Http Parameters Key': '参数名',
'Http Parameters Position': '参数位置',
'Http Parameters Value': '参数值',
'Http Check Condition': '校验条件',
'Http Condition': '校验内容',
'Please Enter Http Url': '请填写请求地址(必填)',
'Please Enter Http Condition': '请填写校验内容',
'There is no data for this period of time': '该时间段无数据',
'IP address cannot be empty': 'IP地址不能为空',
'Please enter the correct IP': '请输入正确的IP',
'Please generate token': '请生成Token',
'Spark Version': 'Spark版本',
TargetDataBase: '目标库',
TargetTable: '目标表',
'Please enter the table of target': '请输入目标表名',
'Please enter a Target Table(required)': '请输入目标表(必填)',
SpeedByte: '限流(字节数)',
SpeedRecord: '限流(记录数)',
'0 means unlimited by byte': 'KB,0代表不限制',
'0 means unlimited by count': '0代表不限制',
'Modify User': '修改用户',
'Whether directory': '是否文件夹',
Yes: '是',
No: '否',
'Hadoop Custom Params': 'Hadoop参数',
'Sqoop Advanced Parameters': 'Sqoop参数',
'Sqoop Job Name': '任务名称',
'Please enter Mysql Database(required)': '请输入Mysql数据库(必填)',
'Please enter Mysql Table(required)': '请输入Mysql表名(必填)',
'Please enter Columns (Comma separated)': '请输入列名,用 , 隔开',
'Please enter Target Dir(required)': '请输入目标路径(必填)',
'Please enter Export Dir(required)': '请输入数据源路径(必填)',
'Please enter Hive Database(required)': '请输入Hive数据库(必填)',
'Please enter Hive Table(required)': '请输入Hive表名(必填)',
'Please enter Hive Partition Keys': '请输入分区键',
'Please enter Hive Partition Values': '请输入分区值',
'Please enter Replace Delimiter': '请输入替换分隔符',
'Please enter Fields Terminated': '请输入列分隔符',
'Please enter Lines Terminated': '请输入行分隔符',
'Please enter Concurrency': '请输入并发度',
'Please enter Update Key': '请输入更新列',
'Please enter Job Name(required)': '请输入任务名称(必填)',
'Please enter Custom Shell(required)': '请输入自定义脚本(必填)',
Direct: '流向',
Type: '类型',
ModelType: '模式',
ColumnType: '列类型',
Database: '数据库',
Column: '列',
'Map Column Hive': 'Hive类型映射',
'Map Column Java': 'Java类型映射',
'Export Dir': '数据源路径',
'Hive partition Keys': 'Hive 分区键',
'Hive partition Values': 'Hive 分区值',
FieldsTerminated: '列分隔符',
LinesTerminated: '行分隔符',
IsUpdate: '是否更新',
UpdateKey: '更新列',
UpdateMode: '更新类型',
'Target Dir': '目标路径',
DeleteTargetDir: '是否删除目录',
FileType: '保存格式',
CompressionCodec: '压缩类型',
CreateHiveTable: '是否创建新表',
DropDelimiter: '是否删除分隔符',
OverWriteSrc: '是否覆盖数据源',
ReplaceDelimiter: '替换分隔符',
Concurrency: '并发度',
Form: '表单',
OnlyUpdate: '只更新',
AllowInsert: '无更新便插入',
'Data Source': '数据来源',
'Data Target': '数据目的',
'All Columns': '全表导入',
'Some Columns': '选择列',
'Branch flow': '分支流转',
'Custom Job': '自定义任务',
'Custom Script': '自定义脚本',
'Cannot select the same node for successful branch flow and failed branch flow': '成功分支流转和失败分支流转不能选择同一个节点',
'Successful branch flow and failed branch flow are required': 'conditions节点成功和失败分支流转必填',
'No resources exist': '不存在资源',
'Please delete all non-existing resources': '请删除所有不存在资源',
'Unauthorized or deleted resources': '未授权或已删除资源',
'Please delete all non-existent resources': '请删除所有未授权或已删除资源',
Kinship: '工作流关系',
Reset: '重置',
KinshipStateActive: '当前选择',
KinshipState1: '已上线',
KinshipState0: '工作流未上线',
KinshipState10: '调度未上线',
'Dag label display control': 'Dag节点名称显隐',
Enable: '启用',
Disable: '停用',
'The Worker group no longer exists, please select the correct Worker group!': '该Worker分组已经不存在,请选择正确的Worker分组!',
'Please confirm whether the workflow has been saved before downloading': '下载前请确定工作流是否已保存',
'User name length is between 3 and 39': '用户名长度在3~39之间',
'Timeout Settings': '超时设置',
'Connect Timeout': '连接超时',
'Socket Timeout': 'Socket超时',
'Connect timeout be a positive integer': '连接超时必须为数字',
'Socket Timeout be a positive integer': 'Socket超时必须为数字',
ms: '毫秒',
'Please Enter Url': '请直接填写地址,例如:127.0.0.1:7077',
Master: 'Master',
'Please select the waterdrop resources': '请选择waterdrop配置文件',
zkDirectory: 'zk注册目录',
'Directory detail': '查看目录详情',
'Connection name': '连线名',
'Current connection settings': '当前连线设置',
'Please save the DAG before formatting': '格式化前请先保存DAG',
'Batch copy': '批量复制',
'Related items': '关联项目',
'Project name is required': '项目名称必填',
'Batch move': '批量移动',
Version: '版本',
'Pre tasks': '前置任务',
'Running Memory': '运行内存',
'Max Memory': '最大内存',
'Min Memory': '最小内存',
'The workflow canvas is abnormal and cannot be saved, please recreate': '该工作流画布异常,无法保存,请重新创建',
Info: '提示'
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,496 | [BUG][server] Add !{} to mark the custom parameters to be output as-is in sql | The hdfs path mapped when hive adds partitions is passed in by custom parameters:
Sample statement: alter table test add if not exists partition(dt='${time}') location "/.../${time}/";
Custom parameters:time in varchar $[yyyy-MM-dd]
Problem Description:
Quotation marks are added to the custom parameters in the Hive load partition SQL, which causes the partition path mapping error:
before modification: location "/.../${time}/" Is parsed into location "/.../'2020-01-19'/"
After modification : location "/.../${time}/" Is parsed into location "/.../2020-01-19/"
After the modification, there are no quotation marks in the hdfs path, which meets the requirements of the partition of the hive table
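A minimal sketch of the behaviour the title asks for: an as-is marker is inlined textually, while `${}` keeps the normal placeholder binding. The `!{}` pattern, helper name, and sample path below are illustrative assumptions, not the project's actual implementation:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AsIsParamDemo {
    // Hypothetical marker: !{var} is replaced textually, so the value is never
    // wrapped in quotes the way a bound string parameter would be.
    private static final Pattern AS_IS = Pattern.compile("!\\{(.*?)\\}");

    static String substituteAsIs(String sql, Map<String, String> params) {
        Matcher m = AS_IS.matcher(sql);
        StringBuffer sb = new StringBuffer();
        while (m.find()) {
            m.appendReplacement(sb, Matcher.quoteReplacement(params.get(m.group(1))));
        }
        m.appendTail(sb);
        return sb.toString();
    }

    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("time", "2020-01-19");
        String sql = "alter table test add if not exists partition(dt='${time}') "
                + "location \"/user/hive/test/!{time}/\"";
        // ${time} keeps the usual quoted binding; !{time} is inlined as-is,
        // so the location resolves to /user/hive/test/2020-01-19/ without quotes.
        System.out.println(substituteAsIs(sql, params));
    }
}
```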
| https://github.com/apache/dolphinscheduler/issues/4496 | https://github.com/apache/dolphinscheduler/pull/4497 | 0ab246da5fc30cc21734d7fa511be4f9080c7a9c | 43586da376cf60f3d719bee5b35c312568408cc7 | "2021-01-19T13:35:25Z" | java | "2021-01-21T12:43:59Z" | dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.task.sql;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.StringUtils;
import org.apache.dolphinscheduler.alert.utils.MailUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlBinds;
import org.apache.dolphinscheduler.common.task.sql.SqlParameters;
import org.apache.dolphinscheduler.common.task.sql.SqlType;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.server.entity.SQLTaskExecutionContext;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.utils.ParamUtils;
import org.apache.dolphinscheduler.server.utils.UDFUtils;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import java.sql.*;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import static org.apache.dolphinscheduler.common.Constants.*;
import static org.apache.dolphinscheduler.common.enums.DbType.HIVE;
/**
* sql task
*/
public class SqlTask extends AbstractTask {
/**
* sql parameters
*/
private SqlParameters sqlParameters;
/**
* alert dao
*/
private AlertDao alertDao;
/**
* base datasource
*/
private BaseDataSource baseDataSource;
/**
* taskExecutionContext
*/
private TaskExecutionContext taskExecutionContext;
/**
* default query sql limit
*/
private static final int LIMIT = 10000;
public SqlTask(TaskExecutionContext taskExecutionContext, Logger logger) {
super(taskExecutionContext, logger);
this.taskExecutionContext = taskExecutionContext;
logger.info("sql task params {}", taskExecutionContext.getTaskParams());
this.sqlParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SqlParameters.class);
if (!sqlParameters.checkParameters()) {
throw new RuntimeException("sql task params is not valid");
}
this.alertDao = SpringApplicationContext.getBean(AlertDao.class);
}
@Override
public void handle() throws Exception {
// set the name of the current thread
String threadLoggerInfoName = String.format(Constants.TASK_LOG_INFO_FORMAT, taskExecutionContext.getTaskAppId());
Thread.currentThread().setName(threadLoggerInfoName);
logger.info("Full sql parameters: {}", sqlParameters);
logger.info("sql type : {}, datasource : {}, sql : {} , localParams : {},udfs : {},showType : {},connParams : {}",
sqlParameters.getType(),
sqlParameters.getDatasource(),
sqlParameters.getSql(),
sqlParameters.getLocalParams(),
sqlParameters.getUdfs(),
sqlParameters.getShowType(),
sqlParameters.getConnParams());
try {
SQLTaskExecutionContext sqlTaskExecutionContext = taskExecutionContext.getSqlTaskExecutionContext();
// load class
DataSourceFactory.loadClass(DbType.valueOf(sqlParameters.getType()));
// get datasource
baseDataSource = DataSourceFactory.getDatasource(DbType.valueOf(sqlParameters.getType()),
sqlTaskExecutionContext.getConnectionParams());
// ready to execute SQL and parameter entity Map
SqlBinds mainSqlBinds = getSqlAndSqlParamsMap(sqlParameters.getSql());
List<SqlBinds> preStatementSqlBinds = Optional.ofNullable(sqlParameters.getPreStatements())
.orElse(new ArrayList<>())
.stream()
.map(this::getSqlAndSqlParamsMap)
.collect(Collectors.toList());
List<SqlBinds> postStatementSqlBinds = Optional.ofNullable(sqlParameters.getPostStatements())
.orElse(new ArrayList<>())
.stream()
.map(this::getSqlAndSqlParamsMap)
.collect(Collectors.toList());
List<String> createFuncs = UDFUtils.createFuncs(sqlTaskExecutionContext.getUdfFuncTenantCodeMap(),
logger);
// execute sql task
executeFuncAndSql(mainSqlBinds, preStatementSqlBinds, postStatementSqlBinds, createFuncs);
setExitStatusCode(Constants.EXIT_CODE_SUCCESS);
} catch (Exception e) {
setExitStatusCode(Constants.EXIT_CODE_FAILURE);
logger.error("sql task error", e);
throw e;
}
}
/**
* ready to execute SQL and parameter entity Map
* @param sql sql template, possibly containing ${} variables
* @return SqlBinds
*/
private SqlBinds getSqlAndSqlParamsMap(String sql) {
Map<Integer,Property> sqlParamsMap = new HashMap<>();
StringBuilder sqlBuilder = new StringBuilder();
// find process instance by task id
Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
taskExecutionContext.getDefinedParams(),
sqlParameters.getLocalParametersMap(),
CommandType.of(taskExecutionContext.getCmdTypeIfComplement()),
taskExecutionContext.getScheduleTime());
// spell SQL according to the final user-defined variable
if(paramsMap == null){
sqlBuilder.append(sql);
return new SqlBinds(sqlBuilder.toString(), sqlParamsMap);
}
if (StringUtils.isNotEmpty(sqlParameters.getTitle())){
String title = ParameterUtils.convertParameterPlaceholders(sqlParameters.getTitle(),
ParamUtils.convert(paramsMap));
logger.info("SQL title : {}",title);
sqlParameters.setTitle(title);
}
// replace $[yyyyMMdd...] time variables in the sql with the schedule time for history runs and batch complement jobs
sql = ParameterUtils.replaceScheduleTime(sql, taskExecutionContext.getScheduleTime());
// ${} must be escaped in the regex; surrounding quotes are matched too, so a quoted '${var}' collapses into a single placeholder
String rgex = "['\"]*\\$\\{(.*?)\\}['\"]*";
setSqlParamsMap(sql, rgex, sqlParamsMap, paramsMap);
// replace the ${} of the SQL statement with the Placeholder
String formatSql = sql.replaceAll(rgex, "?");
sqlBuilder.append(formatSql);
// print replaced sql
printReplacedSql(sql, formatSql, rgex, sqlParamsMap);
return new SqlBinds(sqlBuilder.toString(), sqlParamsMap);
}
@Override
public AbstractParameters getParameters() {
return this.sqlParameters;
}
/**
* execute function and sql
* @param mainSqlBinds main sql binds
* @param preStatementsBinds pre statements binds
* @param postStatementsBinds post statements binds
* @param createFuncs create functions
*/
public void executeFuncAndSql(SqlBinds mainSqlBinds,
List<SqlBinds> preStatementsBinds,
List<SqlBinds> postStatementsBinds,
List<String> createFuncs){
Connection connection = null;
PreparedStatement stmt = null;
ResultSet resultSet = null;
try {
// if upload resource is HDFS and kerberos startup
CommonUtils.loadKerberosConf();
// create connection
connection = createConnection();
// create temp function
if (CollectionUtils.isNotEmpty(createFuncs)) {
createTempFunction(connection,createFuncs);
}
// pre sql
preSql(connection,preStatementsBinds);
stmt = prepareStatementAndBind(connection, mainSqlBinds);
// decide whether to executeQuery or executeUpdate based on sqlType
if (sqlParameters.getSqlType() == SqlType.QUERY.ordinal()) {
// query statements need to be convert to JsonArray and inserted into Alert to send
resultSet = stmt.executeQuery();
resultProcess(resultSet);
} else if (sqlParameters.getSqlType() == SqlType.NON_QUERY.ordinal()) {
// non query statement
stmt.executeUpdate();
}
postSql(connection,postStatementsBinds);
} catch (Exception e) {
logger.error("execute sql error",e);
throw new RuntimeException("execute sql error");
} finally {
close(resultSet,stmt,connection);
}
}
/**
* result process
*
* @param resultSet resultSet
* @throws Exception Exception
*/
private void resultProcess(ResultSet resultSet) throws Exception{
ArrayNode resultJSONArray = JSONUtils.createArrayNode();
ResultSetMetaData md = resultSet.getMetaData();
int num = md.getColumnCount();
int rowCount = 0;
while (rowCount < LIMIT && resultSet.next()) {
ObjectNode mapOfColValues = JSONUtils.createObjectNode();
for (int i = 1; i <= num; i++) {
mapOfColValues.set(md.getColumnLabel(i), JSONUtils.toJsonNode(resultSet.getObject(i)));
}
resultJSONArray.add(mapOfColValues);
rowCount++;
}
String result = JSONUtils.toJsonString(resultJSONArray);
logger.debug("execute sql : {}", result);
sendAttachment(StringUtils.isNotEmpty(sqlParameters.getTitle()) ?
sqlParameters.getTitle(): taskExecutionContext.getTaskName() + " query result sets",
JSONUtils.toJsonString(resultJSONArray));
}
/**
* pre sql
*
* @param connection connection
* @param preStatementsBinds preStatementsBinds
*/
private void preSql(Connection connection,
List<SqlBinds> preStatementsBinds) throws Exception{
for (SqlBinds sqlBind: preStatementsBinds) {
try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)){
int result = pstmt.executeUpdate();
logger.info("pre statement execute result: {}, for sql: {}",result,sqlBind.getSql());
}
}
}
/**
* post sql
*
* @param connection connection
* @param postStatementsBinds postStatementsBinds
* @throws Exception
*/
private void postSql(Connection connection,
List<SqlBinds> postStatementsBinds) throws Exception{
for (SqlBinds sqlBind: postStatementsBinds) {
try (PreparedStatement pstmt = prepareStatementAndBind(connection, sqlBind)){
int result = pstmt.executeUpdate();
logger.info("post statement execute result: {},for sql: {}",result,sqlBind.getSql());
}
}
}
/**
* create temp function
*
* @param connection connection
* @param createFuncs createFuncs
* @throws Exception
*/
private void createTempFunction(Connection connection,
List<String> createFuncs) throws Exception{
try (Statement funcStmt = connection.createStatement()) {
for (String createFunc : createFuncs) {
logger.info("hive create function sql: {}", createFunc);
funcStmt.execute(createFunc);
}
}
}
/**
* create connection
*
* @return connection
* @throws Exception Exception
*/
private Connection createConnection() throws Exception{
// if hive , load connection params if exists
Connection connection = null;
if (HIVE == DbType.valueOf(sqlParameters.getType())) {
Properties paramProp = new Properties();
paramProp.setProperty(USER, baseDataSource.getUser());
paramProp.setProperty(PASSWORD, baseDataSource.getPassword());
Map<String, String> connParamMap = CollectionUtils.stringToMap(sqlParameters.getConnParams(),
SEMICOLON,
HIVE_CONF);
paramProp.putAll(connParamMap);
connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(),
paramProp);
}else{
connection = DriverManager.getConnection(baseDataSource.getJdbcUrl(),
baseDataSource.getUser(),
baseDataSource.getPassword());
}
return connection;
}
/**
* close jdbc resource
*
* @param resultSet resultSet
* @param pstmt pstmt
* @param connection connection
*/
private void close(ResultSet resultSet,
PreparedStatement pstmt,
Connection connection){
if (resultSet != null){
try {
resultSet.close();
} catch (SQLException e) {
logger.error("close result set error : {}",e.getMessage(),e);
}
}
if (pstmt != null){
try {
pstmt.close();
} catch (SQLException e) {
logger.error("close prepared statement error : {}",e.getMessage(),e);
}
}
if (connection != null){
try {
connection.close();
} catch (SQLException e) {
logger.error("close connection error : {}",e.getMessage(),e);
}
}
}
/**
* preparedStatement bind
* @param connection connection
* @param sqlBinds sqlBinds
* @return PreparedStatement
* @throws Exception Exception
*/
private PreparedStatement prepareStatementAndBind(Connection connection, SqlBinds sqlBinds) throws Exception {
// is the timeout set
boolean timeoutFlag = TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.FAILED ||
TaskTimeoutStrategy.of(taskExecutionContext.getTaskTimeoutStrategy()) == TaskTimeoutStrategy.WARNFAILED;
PreparedStatement stmt = connection.prepareStatement(sqlBinds.getSql());
if(timeoutFlag){
stmt.setQueryTimeout(taskExecutionContext.getTaskTimeout());
}
Map<Integer, Property> params = sqlBinds.getParamsMap();
if(params != null) {
for (Map.Entry<Integer, Property> entry : params.entrySet()) {
Property prop = entry.getValue();
ParameterUtils.setInParameter(entry.getKey(), stmt, prop.getType(), prop.getValue());
}
}
logger.info("prepare statement replace sql : {} ", stmt);
return stmt;
}
/**
* send mail as an attachment
* @param title title
* @param content content
*/
public void sendAttachment(String title,String content){
List<User> users = alertDao.queryUserByAlertGroupId(taskExecutionContext.getSqlTaskExecutionContext().getWarningGroupId());
// receiving group list
List<String> receiversList = new ArrayList<>();
for(User user:users){
receiversList.add(user.getEmail().trim());
}
// custom receiver
String receivers = sqlParameters.getReceivers();
if (StringUtils.isNotEmpty(receivers)){
String[] splits = receivers.split(COMMA);
for (String receiver : splits){
receiversList.add(receiver.trim());
}
}
// copy list
List<String> receiversCcList = new ArrayList<>();
// Custom Copier
String receiversCc = sqlParameters.getReceiversCc();
if (StringUtils.isNotEmpty(receiversCc)){
String[] splits = receiversCc.split(COMMA);
for (String receiverCc : splits){
receiversCcList.add(receiverCc.trim());
}
}
String showTypeName = sqlParameters.getShowType().replace(COMMA,"").trim();
if(EnumUtils.isValidEnum(ShowType.class,showTypeName)){
Map<String, Object> mailResult = MailUtils.sendMails(receiversList,
receiversCcList, title, content, ShowType.valueOf(showTypeName).getDescp());
if(!(boolean) mailResult.get(STATUS)){
throw new RuntimeException("send mail failed!");
}
}else{
logger.error("showType: {} is not valid " ,showTypeName);
throw new RuntimeException(String.format("showType: %s is not valid ",showTypeName));
}
}
/**
* regular expressions match the contents between two specified strings
* @param content content
* @param rgex rgex
* @param sqlParamsMap sql params map
* @param paramsPropsMap params props map
*/
public void setSqlParamsMap(String content, String rgex, Map<Integer,Property> sqlParamsMap, Map<String,Property> paramsPropsMap){
Pattern pattern = Pattern.compile(rgex);
Matcher m = pattern.matcher(content);
int index = 1;
while (m.find()) {
String paramName = m.group(1);
Property prop = paramsPropsMap.get(paramName);
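// may be null when the sql references a variable that was never defined; downstream binding assumes a non-null property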
sqlParamsMap.put(index,prop);
index ++;
}
}
/**
* print replace sql
* @param content content
* @param formatSql format sql
* @param rgex rgex
* @param sqlParamsMap sql params map
*/
public void printReplacedSql(String content, String formatSql,String rgex, Map<Integer,Property> sqlParamsMap){
//parameter print style
logger.info("after replace sql , preparing : {}" , formatSql);
StringBuilder logPrint = new StringBuilder("replaced sql , parameters:");
for(int i=1;i<=sqlParamsMap.size();i++){
logPrint.append(sqlParamsMap.get(i).getValue()+"("+sqlParamsMap.get(i).getType()+")");
}
logger.info("Sql Params are {}", logPrint);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
The Process Definition interface parameter releaseState is changed to an enumeration type
**What are the current deficiencies and the benefits of improvement**
- The current Process Definition interface parameter releaseState variable uses an int type, and the releaseState mapping relationship needs to be recorded in both the front-end and back-end code
**Which version of DolphinScheduler:**
-[dev]
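A minimal sketch of the proposed direction, assuming an enum shaped like the one below; the Spring converter is one illustrative way to keep accepting the old numeric codes, not necessarily how the actual change was made:

```java
import org.springframework.core.convert.converter.Converter;

// Assumed shape; the real ReleaseState enum lives in the common module and
// may differ in codes and naming.
enum ReleaseState {
    OFFLINE(0), ONLINE(1);

    private final int code;

    ReleaseState(int code) {
        this.code = code;
    }

    static ReleaseState ofCode(int code) {
        for (ReleaseState state : values()) {
            if (state.code == code) {
                return state;
            }
        }
        throw new IllegalArgumentException("unknown release state code: " + code);
    }
}

// Hypothetical converter so clients that still send "0"/"1" keep working while
// the controller parameter is declared as the enum itself:
//   @RequestParam(value = "releaseState") ReleaseState releaseState
class ReleaseStateConverter implements Converter<String, ReleaseState> {
    @Override
    public ReleaseState convert(String source) {
        return ReleaseState.ofCode(Integer.parseInt(source));
    }
}
```

With the enum bound at the controller boundary, the 0/1 mapping no longer has to be mirrored in both the front-end and back-end code.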
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_COPY_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.BATCH_MOVE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_PROCESS_DEFINITION;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_PROCESS_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_PROCESS_DEFINITION_VERSIONS_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RELEASE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import com.fasterxml.jackson.core.JsonProcessingException;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/**
* process definition controller
*/
@Api(tags = "PROCESS_DEFINITION_TAG", position = 2)
@RestController
@RequestMapping("projects/{projectName}/process")
public class ProcessDefinitionController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionController.class);
@Autowired
private ProcessDefinitionService processDefinitionService;
@Autowired
private ProcessDefinitionVersionService processDefinitionVersionService;
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param json process definition json
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
*/
@ApiOperation(value = "save", notes = "CREATE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type = "String"),
@ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type = "String"),
@ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type = "String"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"),
})
@PostMapping(value = "/save")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_PROCESS_DEFINITION)
public Result createProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "processDefinitionJson", required = true) String json,
@RequestParam(value = "locations", required = true) String locations,
@RequestParam(value = "connects", required = true) String connects,
@RequestParam(value = "description", required = false) String description) throws JsonProcessingException {
logger.info("login user {}, create process definition, project name: {}, process definition name: {}, "
+ "process_definition_json: {}, desc: {} locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, json, description, locations, connects);
Map<String, Object> result = processDefinitionService.createProcessDefinition(loginUser, projectName, name, json,
description, locations, connects);
return returnDataList(result);
}
/**
* copy process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return copy result code
*/
@ApiOperation(value = "copyProcessDefinition", notes = "COPY_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, dataType = "Int", example = "10")
})
@PostMapping(value = "/copy")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_COPY_PROCESS_DEFINITION_ERROR)
public Result copyProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch copy process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
}
/**
* move process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param targetProjectId target project id
* @return move result code
*/
@ApiOperation(value = "moveProcessDefinition", notes = "MOVE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", required = true, dataType = "String", example = "3,4"),
@ApiImplicitParam(name = "targetProjectId", value = "TARGET_PROJECT_ID", required = true, dataType = "Int", example = "10")
})
@PostMapping(value = "/move")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_MOVE_PROCESS_DEFINITION_ERROR)
public Result moveProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionIds", required = true) String processDefinitionIds,
@RequestParam(value = "targetProjectId", required = true) int targetProjectId) {
logger.info("batch move process definition, login user:{}, project name:{}, process definition ids:{},target project id:{}",
StringUtils.replaceNRTtoUnderline(loginUser.getUserName()),
StringUtils.replaceNRTtoUnderline(projectName),
StringUtils.replaceNRTtoUnderline(processDefinitionIds),
StringUtils.replaceNRTtoUnderline(String.valueOf(targetProjectId)));
return returnDataList(
processDefinitionService.batchMoveProcessDefinition(loginUser, projectName, processDefinitionIds, targetProjectId));
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectName project name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
@ApiOperation(value = "verify-name", notes = "VERIFY_PROCESS_DEFINITION_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String")
})
@GetMapping(value = "/verify-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR)
public Result verifyProcessDefinitionName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name) {
logger.info("verify process definition name unique, user:{}, project name:{}, process definition name:{}",
loginUser.getUserName(), projectName, name);
Map<String, Object> result = processDefinitionService.verifyProcessDefinitionName(loginUser, projectName, name);
return returnDataList(result);
}
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param description description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
@ApiOperation(value = "updateProcessDefinition", notes = "UPDATE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "id", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processDefinitionJson", value = "PROCESS_DEFINITION_JSON", required = true, type = "String"),
@ApiImplicitParam(name = "locations", value = "PROCESS_DEFINITION_LOCATIONS", required = true, type = "String"),
@ApiImplicitParam(name = "connects", value = "PROCESS_DEFINITION_CONNECTS", required = true, type = "String"),
@ApiImplicitParam(name = "description", value = "PROCESS_DEFINITION_DESC", required = false, type = "String"),
@ApiImplicitParam(name = "releaseState", value = "RELEASE_PROCESS_DEFINITION_NOTES", required = false, dataType = "Int", example = "0")
})
@PostMapping(value = "/update")
@ResponseStatus(HttpStatus.OK)
@ApiException(UPDATE_PROCESS_DEFINITION_ERROR)
public Result updateProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "name", required = true) String name,
@RequestParam(value = "id", required = true) int id,
@RequestParam(value = "processDefinitionJson", required = true) String processDefinitionJson,
@RequestParam(value = "locations", required = false) String locations,
@RequestParam(value = "connects", required = false) String connects,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "releaseState", required = false, defaultValue = "0") int releaseState) {
logger.info("login user {}, update process define, project name: {}, process define name: {}, "
+ "process_definition_json: {}, desc: {}, locations:{}, connects:{}",
loginUser.getUserName(), projectName, name, processDefinitionJson, description, locations, connects);
Map<String, Object> result = processDefinitionService.updateProcessDefinition(loginUser, projectName, id, name,
processDefinitionJson, description, locations, connects);
// If the update fails, the result will be returned directly
Status status = (Status) result.get("status");
if (status.getCode() != 0) {
return returnDataList(result);
}
        // decide whether to bring the definition online after editing: 0 means offline, 1 means online
if (releaseState == 1) {
result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, id, releaseState);
}
return returnDataList(result);
}
/**
* query process definition version paging list info
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param pageNo the process definition version list current page number
* @param pageSize the process definition version list page size
* @param processDefinitionId the process definition id
* @return the process definition version list
*/
@ApiOperation(value = "queryProcessDefinitionVersions", notes = "QUERY_PROCESS_DEFINITION_VERSIONS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/versions")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_VERSIONS_ERROR)
public Result queryProcessDefinitionVersions(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "pageNo") int pageNo,
@RequestParam(value = "pageSize") int pageSize,
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
Map<String, Object> result = processDefinitionVersionService.queryProcessDefinitionVersions(loginUser
, projectName, pageNo, pageSize, processDefinitionId);
return returnDataList(result);
}
/**
* switch certain process definition version
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param processDefinitionId the process definition id
* @param version the version user want to switch
* @return switch version result code
*/
@ApiOperation(value = "switchProcessDefinitionVersion", notes = "SWITCH_PROCESS_DEFINITION_VERSION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
})
@GetMapping(value = "/version/switch")
@ResponseStatus(HttpStatus.OK)
@ApiException(SWITCH_PROCESS_DEFINITION_VERSION_ERROR)
public Result switchProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "version") long version) {
Map<String, Object> result = processDefinitionService.switchProcessDefinitionVersion(loginUser, projectName
, processDefinitionId, version);
return returnDataList(result);
}
/**
* delete the certain process definition version by version and process definition id
*
* @param loginUser login user info
* @param projectName the process definition project name
* @param processDefinitionId process definition id
* @param version the process definition version user want to delete
* @return delete version result code
*/
@ApiOperation(value = "deleteProcessDefinitionVersion", notes = "DELETE_PROCESS_DEFINITION_VERSION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "version", value = "VERSION", required = true, dataType = "Long", example = "100")
})
@GetMapping(value = "/version/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINITION_VERSION_ERROR)
public Result deleteProcessDefinitionVersion(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "version") long version) {
Map<String, Object> result = processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(loginUser, projectName, processDefinitionId, version);
return returnDataList(result);
}
/**
* release process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param releaseState release state
* @return release result code
*/
@ApiOperation(value = "releaseProcessDefinition", notes = "RELEASE_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "PROCESS_DEFINITION_NAME", required = true, type = "String"),
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "releaseState", value = "PROCESS_DEFINITION_CONNECTS", required = true, dataType = "Int", example = "100"),
})
@PostMapping(value = "/release")
@ResponseStatus(HttpStatus.OK)
@ApiException(RELEASE_PROCESS_DEFINITION_ERROR)
public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processId", required = true) int processId,
@RequestParam(value = "releaseState", required = true) int releaseState) {
logger.info("login user {}, release process definition, project name: {}, release state: {}",
loginUser.getUserName(), projectName, releaseState);
Map<String, Object> result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState);
return returnDataList(result);
}
/**
     * query detail of process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return process definition detail
*/
@ApiOperation(value = "queryProcessDefinitionById", notes = "QUERY_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/select-by-id")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR)
public Result queryProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processId") Integer processId
) {
logger.info("query detail of process definition, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processId);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionById(loginUser, projectName, processId);
return returnDataList(result);
}
/**
     * query detail of process definition by name
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionName process definition name
* @return process definition detail
*/
@ApiOperation(value = "queryProcessDefinitionByName", notes = "QUERY_PROCESS_DEFINITION_BY_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionName", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
})
@GetMapping(value = "/select-by-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATAIL_OF_PROCESS_DEFINITION_ERROR)
public Result<ProcessDefinition> queryProcessDefinitionByName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionName") String processDefinitionName
) {
Map<String, Object> result = processDefinitionService.queryProcessDefinitionByName(loginUser, projectName, processDefinitionName);
return returnDataList(result);
}
/**
* query Process definition list
*
* @param loginUser login user
* @param projectName project name
* @return process definition list
*/
@ApiOperation(value = "queryProcessDefinitionList", notes = "QUERY_PROCESS_DEFINITION_LIST_NOTES")
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
public Result queryProcessDefinitionList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName
) {
logger.info("query process definition list, login user:{}, project name:{}",
loginUser.getUserName(), projectName);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionList(loginUser, projectName);
return returnDataList(result);
}
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
@ApiOperation(value = "queryProcessDefinitionListPaging", notes = "QUERY_PROCESS_DEFINITION_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", required = false, type = "String"),
@ApiImplicitParam(name = "userId", value = "USER_ID", required = false, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST_PAGING_ERROR)
public Result queryProcessDefinitionListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam(value = "userId", required = false, defaultValue = "0") Integer userId,
@RequestParam("pageSize") Integer pageSize) {
logger.info("query process definition list paging, login user:{}, project name:{}", loginUser.getUserName(), projectName);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = processDefinitionService.queryProcessDefinitionListPaging(loginUser, projectName, searchVal, pageNo, pageSize, userId);
return returnDataListPaging(result);
}
/**
     * encapsulate the treeview structure
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param limit limit
* @return tree view json data
*/
@ApiOperation(value = "viewTree", notes = "VIEW_TREE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/view-tree")
@ResponseStatus(HttpStatus.OK)
@ApiException(ENCAPSULATION_TREEVIEW_STRUCTURE_ERROR)
public Result viewTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processId") Integer id,
@RequestParam("limit") Integer limit) throws Exception {
Map<String, Object> result = processDefinitionService.viewTree(id, limit);
return returnDataList(result);
}
/**
* get tasks list by process definition id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return task list
*/
@ApiOperation(value = "getNodeListByDefinitionId", notes = "GET_NODE_LIST_BY_DEFINITION_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "gen-task-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR)
public Result getNodeListByDefinitionId(
@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId) throws Exception {
logger.info("query task node name list by definitionId, login user:{}, project name:{}, id : {}",
loginUser.getUserName(), projectName, processDefinitionId);
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionId(processDefinitionId);
return returnDataList(result);
}
/**
     * get task node list by process definition id list
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIdList process definition id list
* @return node list data
*/
@ApiOperation(value = "getNodeListByDefinitionIdList", notes = "GET_NODE_LIST_BY_DEFINITION_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIdList", value = "PROCESS_DEFINITION_ID_LIST", required = true, type = "String")
})
@GetMapping(value = "get-task-list")
@ResponseStatus(HttpStatus.OK)
@ApiException(GET_TASKS_LIST_BY_PROCESS_DEFINITION_ID_ERROR)
public Result getNodeListByDefinitionIdList(
@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIdList") String processDefinitionIdList) {
logger.info("query task node name list by definitionId list, login user:{}, project name:{}, id list: {}",
loginUser.getUserName(), projectName, processDefinitionIdList);
Map<String, Object> result = processDefinitionService.getTaskNodeListByDefinitionIdList(processDefinitionIdList);
return returnDataList(result);
}
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@ApiOperation(value = "deleteProcessDefinitionById", notes = "DELETE_PROCESS_DEFINITION_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", dataType = "Int", example = "100")
})
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_PROCESS_DEFINE_BY_ID_ERROR)
public Result deleteProcessDefinitionById(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionId") Integer processDefinitionId
) {
logger.info("delete process definition by id, login user:{}, project name:{}, process definition id:{}",
loginUser.getUserName(), projectName, processDefinitionId);
Map<String, Object> result = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId);
return returnDataList(result);
}
/**
* batch delete process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition id list
* @return delete result code
*/
@ApiOperation(value = "batchDeleteProcessDefinitionByIds", notes = "BATCH_DELETE_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_IDS", type = "String")
})
@GetMapping(value = "/batch-delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR)
public Result batchDeleteProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIds") String processDefinitionIds
) {
logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
Map<String, Object> result = new HashMap<>();
List<String> deleteFailedIdList = new ArrayList<>();
if (StringUtils.isNotEmpty(processDefinitionIds)) {
String[] processDefinitionIdArray = processDefinitionIds.split(",");
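            // delete the definitions one by one, recording the ids that fail so a single failure does not abort the batch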
for (String strProcessDefinitionId : processDefinitionIdArray) {
int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
try {
Map<String, Object> deleteResult = processDefinitionService.deleteProcessDefinitionById(loginUser, projectName, processDefinitionId);
if (!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))) {
deleteFailedIdList.add(strProcessDefinitionId);
logger.error((String) deleteResult.get(Constants.MSG));
}
} catch (Exception e) {
deleteFailedIdList.add(strProcessDefinitionId);
}
}
}
if (!deleteFailedIdList.isEmpty()) {
putMsg(result, Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR, String.join(",", deleteFailedIdList));
} else {
putMsg(result, Status.SUCCESS);
}
return returnDataList(result);
}
/**
* batch export process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param response response
*/
@ApiOperation(value = "batchExportProcessDefinitionByIds", notes = "BATCH_EXPORT_PROCESS_DEFINITION_BY_IDS_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionIds", value = "PROCESS_DEFINITION_ID", required = true, dataType = "String")
})
@GetMapping(value = "/export")
@ResponseBody
public void batchExportProcessDefinitionByIds(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processDefinitionIds") String processDefinitionIds,
HttpServletResponse response) {
try {
logger.info("batch export process definition by ids, login user:{}, project name:{}, process definition ids:{}",
loginUser.getUserName(), projectName, processDefinitionIds);
processDefinitionService.batchExportProcessDefinitionByIds(loginUser, projectName, processDefinitionIds, response);
} catch (Exception e) {
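            // only log the failure here: the response stream may already be partially written, so no error body is sent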
logger.error(Status.BATCH_EXPORT_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(), e);
}
}
/**
* query process definition all by project id
*
* @param loginUser login user
* @param projectId project id
* @return process definition list
*/
@ApiOperation(value = "queryProcessDefinitionAllByProjectId", notes = "QUERY_PROCESS_DEFINITION_All_BY_PROJECT_ID_NOTES")
@GetMapping(value = "/queryProcessDefinitionAllByProjectId")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_PROCESS_DEFINITION_LIST)
public Result queryProcessDefinitionAllByProjectId(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("projectId") Integer projectId) {
logger.info("query process definition list, login user:{}, project id:{}",
loginUser.getUserName(), projectId);
Map<String, Object> result = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId);
return returnDataList(result);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
Change the Process Definition interface parameter releaseState to an enumeration type.
**What are the current deficiencies and the benefits of the improvement**
- The releaseState parameter currently uses an int type, so the int-to-state mapping has to be maintained in both the front-end and the back-end code.
**Which version of DolphinScheduler:**
- [dev]
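
A minimal sketch of the proposed signature, assuming Spring MVC can bind the request parameter straight to the existing `ReleaseState` enum (the handler shape mirrors the current `/release` endpoint; binding the legacy numeric codes would need a converter such as the one sketched further below, so treat this as illustrative, not the final patch):

```java
@PostMapping(value = "/release")
@ResponseStatus(HttpStatus.OK)
@ApiException(RELEASE_PROCESS_DEFINITION_ERROR)
public Result releaseProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
                                       @RequestParam(value = "processId", required = true) int processId,
                                       // enum instead of int: an unknown value is rejected at binding time,
                                       // so no int-to-state mapping has to live in the controller or the front end
                                       @RequestParam(value = "releaseState", required = true) ReleaseState releaseState) {
    Map<String, Object> result = processDefinitionService.releaseProcessDefinition(loginUser, projectName, processId, releaseState);
    return returnDataList(result);
}
```

With this signature the service layer no longer needs its `ReleaseState.getEnum(releaseState)` lookup and the associated null check, because an unmapped value never reaches the method.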
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.springframework.web.multipart.MultipartFile;
import com.fasterxml.jackson.core.JsonProcessingException;
/**
* process definition service
*/
public interface ProcessDefinitionService {
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
* @throws JsonProcessingException JsonProcessingException
*/
Map<String, Object> createProcessDefinition(User loginUser,
String projectName,
String name,
String processDefinitionJson,
String desc,
String locations,
String connects) throws JsonProcessingException;
/**
* query process definition list
*
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
Map<String, Object> queryProcessDefinitionList(User loginUser,
String projectName);
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
Map<String, Object> queryProcessDefinitionListPaging(User loginUser,
String projectName,
String searchVal,
Integer pageNo,
Integer pageSize,
Integer userId);
/**
     * query detail of process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return process definition detail
*/
Map<String, Object> queryProcessDefinitionById(User loginUser,
String projectName,
Integer processId);
/**
     * query detail of process definition
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionName process definition name
* @return process definition detail
*/
Map<String, Object> queryProcessDefinitionByName(User loginUser,
String projectName,
String processDefinitionName);
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
*/
Map<String, Object> batchCopyProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId);
/**
* batch move process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
*/
Map<String, Object> batchMoveProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId);
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
Map<String, Object> updateProcessDefinition(User loginUser,
String projectName,
int id,
String name,
String processDefinitionJson, String desc,
String locations, String connects);
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectName project name
* @param name name
     * @return true if the process definition name does not exist, otherwise false
*/
Map<String, Object> verifyProcessDefinitionName(User loginUser,
String projectName,
String name);
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
Map<String, Object> deleteProcessDefinitionById(User loginUser,
String projectName,
Integer processDefinitionId);
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param releaseState release state
* @return release result code
*/
Map<String, Object> releaseProcessDefinition(User loginUser,
String projectName,
int id,
int releaseState);
/**
* batch export process definition by ids
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionIds process definition ids
* @param response http servlet response
*/
void batchExportProcessDefinitionByIds(User loginUser,
String projectName,
String processDefinitionIds,
HttpServletResponse response);
/**
* import process definition
*
* @param loginUser login user
* @param file process metadata json file
* @param currentProjectName current project name
* @return import process
*/
Map<String, Object> importProcessDefinition(User loginUser,
MultipartFile file,
String currentProjectName);
/**
* check the process definition node meets the specifications
*
* @param processData process data
* @param processDefinitionJson process definition json
* @return check result code
*/
Map<String, Object> checkProcessNodeList(ProcessData processData,
String processDefinitionJson);
/**
* get task node details based on process definition
*
* @param defineId define id
* @return task node list
*/
Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId);
/**
* get task node details based on process definition
*
* @param defineIdList define id list
* @return task node list
*/
Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList);
/**
* query process definition all by project id
*
* @param projectId project id
* @return process definitions in the project
*/
Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId);
/**
* Encapsulates the TreeView structure
*
* @param processId process definition id
* @param limit limit
* @return tree view json data
* @throws Exception exception
*/
Map<String, Object> viewTree(Integer processId,
Integer limit) throws Exception;
/**
     * switch the specified process definition version
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param version the version user want to switch
* @return switch process definition version result code
*/
Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
, int processDefinitionId, long version);
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
Change the Process Definition interface parameter releaseState to an enumeration type.
**What are the current deficiencies and the benefits of the improvement**
- The releaseState parameter currently uses an int type, so the int-to-state mapping has to be maintained in both the front-end and the back-end code.
**Which version of DolphinScheduler:**
- [dev]
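
Since the existing front end submits the numeric codes ("0"/"1"), one way to expose the enum on the API without breaking callers is a Spring converter. This is a hedged sketch under that assumption, not the actual patch from the linked PR; it reuses `ReleaseState.getEnum(int)`, which the service implementation below already calls:

```java
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.springframework.core.convert.converter.Converter;
import org.springframework.stereotype.Component;

/**
 * Binds an incoming request parameter to ReleaseState so controller methods can
 * declare the enum directly: accepts the legacy numeric codes ("0"/"1") as well
 * as the enum names ("OFFLINE"/"ONLINE").
 */
@Component
public class ReleaseStateConverter implements Converter<String, ReleaseState> {

    @Override
    public ReleaseState convert(String source) {
        if (source.chars().allMatch(Character::isDigit)) {
            // legacy numeric code, e.g. "0" -> OFFLINE, "1" -> ONLINE
            return ReleaseState.getEnum(Integer.parseInt(source));
        }
        // enum name, e.g. "ONLINE"
        return ReleaseState.valueOf(source.toUpperCase());
    }
}
```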
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.exportprocess.ProcessAddTaskParam;
import org.apache.dolphinscheduler.api.utils.exportprocess.TaskNodeParamFactory;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StreamUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.TaskParametersUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.permission.PermissionCheck;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* process definition service impl
*/
@Service
public class ProcessDefinitionServiceImpl extends BaseService implements
ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
private static final String PROCESSDEFINITIONID = "processDefinitionId";
private static final String RELEASESTATE = "releaseState";
private static final String TASKS = "tasks";
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceService processInstanceService;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProcessService processService;
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
*/
@Override
public Map<String, Object> createProcessDefinition(User loginUser,
String projectName,
String name,
String processDefinitionJson,
String desc,
String locations,
String connects) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefine = new ProcessDefinition();
Date now = new Date();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
Map<String, Object> checkProcessJson = checkProcessNodeList(processData, processDefinitionJson);
if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkProcessJson;
}
processDefine.setName(name);
processDefine.setReleaseState(ReleaseState.OFFLINE);
processDefine.setProjectId(project.getId());
processDefine.setUserId(loginUser.getId());
processDefine.setProcessDefinitionJson(processDefinitionJson);
processDefine.setDescription(desc);
processDefine.setLocations(locations);
processDefine.setConnects(connects);
processDefine.setTimeout(processData.getTimeout());
processDefine.setTenantId(processData.getTenantId());
processDefine.setModifyBy(loginUser.getUserName());
processDefine.setResourceIds(getResourceIds(processData));
//custom global params
List<Property> globalParamsList = processData.getGlobalParams();
if (CollectionUtils.isNotEmpty(globalParamsList)) {
Set<Property> globalParamsSet = new HashSet<>(globalParamsList);
globalParamsList = new ArrayList<>(globalParamsSet);
processDefine.setGlobalParamList(globalParamsList);
}
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
// save the new process definition
processDefineMapper.insert(processDefine);
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
processDefine.setVersion(version);
processDefineMapper.updateVersionByProcessDefinitionId(processDefine.getId(), version);
// return processDefinition object with ID
result.put(Constants.DATA_LIST, processDefine.getId());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get resource ids
*
* @param processData process data
* @return resource ids
*/
    private String getResourceIds(ProcessData processData) {
        List<TaskNode> tasks = processData.getTasks();
        if (CollectionUtils.isEmpty(tasks)) {
            return "";
        }
        // collect the distinct resource file ids referenced by every task node
        Set<Integer> resourceIds = new HashSet<>();
        for (TaskNode taskNode : tasks) {
            String taskParameter = taskNode.getParams();
            AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter);
            if (params == null) {
                continue;
            }
            if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) {
                Set<Integer> tempSet = params.getResourceFilesList()
                        .stream()
                        .filter(t -> t.getId() != 0)
                        .map(ResourceInfo::getId)
                        .collect(Collectors.toSet());
                resourceIds.addAll(tempSet);
            }
        }
        // join the ids into a comma separated string, e.g. "1,2,3"
        return resourceIds.stream()
                .map(String::valueOf)
                .collect(Collectors.joining(","));
    }
/**
* query process definition list
*
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
@Override
public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
HashMap<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(project.getId());
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
@Override
public Map<String, Object> queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
Page<ProcessDefinition> page = new Page<>(pageNo, pageSize);
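        // pass the admin flag so the mapper can scope the listing appropriately for non-admin users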
IPage<ProcessDefinition> processDefinitionIPage = processDefineMapper.queryDefineListPaging(
page, searchVal, userId, project.getId(), isAdmin(loginUser));
PageInfo<ProcessDefinition> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) processDefinitionIPage.getTotal());
pageInfo.setLists(processDefinitionIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
     * query detail of process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return process definition detail
*/
@Override
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
} else {
result.put(Constants.DATA_LIST, processDefinition);
putMsg(result, Status.SUCCESS);
}
return result;
}
@Override
public Map<String, Object> queryProcessDefinitionByName(User loginUser, String projectName, String processDefinitionName) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
        ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(), processDefinitionName);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionName);
} else {
result.put(Constants.DATA_LIST, processDefinition);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
@Override
public Map<String, Object> updateProcessDefinition(User loginUser,
String projectName,
int id,
String name,
String processDefinitionJson,
String desc,
String locations,
String connects) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
Map<String, Object> checkProcessJson = checkProcessNodeList(processData, processDefinitionJson);
if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) {
return checkProcessJson;
}
ProcessDefinition processDefine = processService.findProcessDefineById(id);
// check process definition exists
if (processDefine == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id);
return result;
}
if (processDefine.getReleaseState() == ReleaseState.ONLINE) {
            // an online process definition cannot be edited
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefine.getName());
return result;
}
if (!name.equals(processDefine.getName())) {
// check whether the new process define name exist
ProcessDefinition definition = processDefineMapper.verifyByDefineName(project.getId(), name);
if (definition != null) {
putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, name);
return result;
}
}
Date now = new Date();
processDefine.setId(id);
processDefine.setName(name);
processDefine.setReleaseState(ReleaseState.OFFLINE);
processDefine.setProjectId(project.getId());
processDefine.setProcessDefinitionJson(processDefinitionJson);
processDefine.setDescription(desc);
processDefine.setLocations(locations);
processDefine.setConnects(connects);
processDefine.setTimeout(processData.getTimeout());
processDefine.setTenantId(processData.getTenantId());
processDefine.setModifyBy(loginUser.getUserName());
processDefine.setResourceIds(getResourceIds(processData));
//custom global params
List<Property> globalParamsList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(processData.getGlobalParams())) {
Set<Property> userDefParamsSet = new HashSet<>(processData.getGlobalParams());
globalParamsList = new ArrayList<>(userDefParamsSet);
}
processDefine.setGlobalParamList(globalParamsList);
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
processDefine.setVersion(version);
if (processDefineMapper.updateById(processDefine) > 0) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefineMapper.queryByDefineId(id));
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
return result;
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectName project name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
@Override
public Map<String, Object> verifyProcessDefinitionName(User loginUser, String projectName, String name) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.verifyByDefineName(project.getId(), name);
if (processDefinition == null) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, name);
}
return result;
}
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId);
return result;
}
// Determine if the login user is the owner of the process definition
if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// check process definition is already online
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, processDefinitionId);
return result;
}
// check process instances is already running
List<ProcessInstance> processInstances = processInstanceService.queryByProcessDefineIdAndStatus(processDefinitionId, Constants.NOT_TERMINATED_STATES);
if (CollectionUtils.isNotEmpty(processInstances)) {
putMsg(result, Status.DELETE_PROCESS_DEFINITION_BY_ID_FAIL, processInstances.size());
return result;
}
        // fetch the schedules bound to this process definition
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
        if (schedules.size() > 1) {
            logger.warn("scheduler num is {}, greater than 1", schedules.size());
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR);
return result;
} else if (schedules.size() == 1) {
Schedule schedule = schedules.get(0);
if (schedule.getReleaseState() == ReleaseState.OFFLINE) {
scheduleMapper.deleteById(schedule.getId());
} else if (schedule.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
return result;
}
}
int delete = processDefineMapper.deleteById(processDefinitionId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR);
}
return result;
}
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param releaseState release state
* @return release result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> releaseProcessDefinition(User loginUser, String projectName, int id, int releaseState) {
HashMap<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ReleaseState state = ReleaseState.getEnum(releaseState);
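        // getEnum returns null for an unrecognized int code; issue #4493 proposes accepting the enum directly in the API layer so this mapping can go away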
// check state
if (null == state) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
        ProcessDefinition processDefinition = processDefineMapper.selectById(id);
        // guard against a non-existent definition before switching its state
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id);
            return result;
        }
switch (state) {
case ONLINE:
                // check whether the referenced resources have been de-authorized or deleted
String resourceIds = processDefinition.getResourceIds();
if (StringUtils.isNotBlank(resourceIds)) {
Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE_ID, processService, resourceIdArray, loginUser.getId(), logger);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error(e.getMessage(), e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, RELEASESTATE);
return result;
}
}
processDefinition.setReleaseState(state);
processDefineMapper.updateById(processDefinition);
break;
case OFFLINE:
processDefinition.setReleaseState(state);
processDefineMapper.updateById(processDefinition);
List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(
new int[]{processDefinition.getId()}
);
for (Schedule schedule : scheduleList) {
logger.info("set schedule offline, project id: {}, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id);
// set status
schedule.setReleaseState(ReleaseState.OFFLINE);
scheduleMapper.updateById(schedule);
SchedulerService.deleteSchedule(project.getId(), schedule.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* batch export process definition by ids
*/
@Override
public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) {
if (StringUtils.isEmpty(processDefinitionIds)) {
return;
}
//export project info
Project project = projectMapper.queryByName(projectName);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return;
}
List<ProcessMeta> processDefinitionList =
getProcessDefinitionList(processDefinitionIds);
if (CollectionUtils.isNotEmpty(processDefinitionList)) {
downloadProcessDefinitionFile(response, processDefinitionList);
}
}
/**
* get process definition list by ids
*/
private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds) {
List<ProcessMeta> processDefinitionList = new ArrayList<>();
String[] processDefinitionIdArray = processDefinitionIds.split(",");
for (String strProcessDefinitionId : processDefinitionIdArray) {
//get workflow info
int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (null != processDefinition) {
processDefinitionList.add(exportProcessMetaData(processDefinitionId, processDefinition));
}
}
return processDefinitionList;
}
/**
* download the process definition file
*/
    private void downloadProcessDefinitionFile(HttpServletResponse response, List<ProcessMeta> processDefinitionList) {
        response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
        // try-with-resources closes (and flushes) the buffered stream and the servlet output stream in order
        try (ServletOutputStream out = response.getOutputStream();
             BufferedOutputStream buff = new BufferedOutputStream(out)) {
            buff.write(JSONUtils.toJsonString(processDefinitionList).getBytes(StandardCharsets.UTF_8));
            buff.flush();
        } catch (IOException e) {
            logger.warn("export process fail", e);
        }
    }
/**
* get export process metadata string
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata string
*/
public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
//create workflow json file
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition));
}
/**
* get export process metadata
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata
*/
public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) {
//correct task params that contain data source or dependent params
String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson());
processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson);
//export process metadata
ProcessMeta exportProcessMeta = new ProcessMeta();
exportProcessMeta.setProjectName(processDefinition.getProjectName());
exportProcessMeta.setProcessDefinitionName(processDefinition.getName());
exportProcessMeta.setProcessDefinitionJson(processDefinition.getProcessDefinitionJson());
exportProcessMeta.setProcessDefinitionDescription(processDefinition.getDescription());
exportProcessMeta.setProcessDefinitionLocations(processDefinition.getLocations());
exportProcessMeta.setProcessDefinitionConnects(processDefinition.getConnects());
//schedule info
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
if (!schedules.isEmpty()) {
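// only the first schedule is exported; its release state is forced to OFFLINE
// below so that an imported schedule does not fire immediately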
Schedule schedule = schedules.get(0);
exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString());
exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId());
exportProcessMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime()));
exportProcessMeta.setScheduleEndTime(DateUtils.dateToString(schedule.getEndTime()));
exportProcessMeta.setScheduleCrontab(schedule.getCrontab());
exportProcessMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy()));
exportProcessMeta.setScheduleReleaseState(String.valueOf(ReleaseState.OFFLINE));
exportProcessMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority()));
exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup());
}
//return the assembled process meta
return exportProcessMeta;
}
/**
* correct task params that contain data source or dependent info
*
* @param processDefinitionJson processDefinitionJson
* @return correct processDefinitionJson
*/
private String addExportTaskNodeSpecialParam(String processDefinitionJson) {
ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.path(TASKS);
for (int i = 0; i < jsonArray.size(); i++) {
JsonNode taskNode = jsonArray.path(i);
String taskType = taskNode.path("type").asText();
if (StringUtils.isNotEmpty(taskType)) {
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
if (null != addTaskParam) {
addTaskParam.addExportSpecialParam(taskNode);
}
}
}
jsonObject.set(TASKS, jsonArray);
return jsonObject.toString();
}
/**
* check task if has sub process
*
* @param taskType task type
* @return if task has sub process return true else false
*/
private boolean checkTaskHasSubProcess(String taskType) {
return taskType.equals(TaskType.SUB_PROCESS.name());
}
/**
* import process definition
*
* @param loginUser login user
* @param file process metadata json file
* @param currentProjectName current project name
* @return import process
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
Map<String, Object> result = new HashMap<>(5);
String processMetaJson = FileUtils.file2String(file);
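// the uploaded file is expected to contain a JSON array of ProcessMeta objects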
List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
//check file content
if (CollectionUtils.isEmpty(processMetaList)) {
putMsg(result, Status.DATA_IS_NULL, "fileContent");
return result;
}
for (ProcessMeta processMeta : processMetaList) {
if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)) {
return result;
}
}
return result;
}
/**
* check and import process definition
*/
private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map<String, Object> result, ProcessMeta processMeta) {
if (!checkImportanceParams(processMeta, result)) {
return false;
}
//deal with process name
String processDefinitionName = processMeta.getProcessDefinitionName();
//use currentProjectName to query
Project targetProject = projectMapper.queryByName(currentProjectName);
if (null != targetProject) {
processDefinitionName = recursionProcessDefinitionName(targetProject.getId(),
processDefinitionName, 1);
}
//unique check
Map<String, Object> checkResult = verifyProcessDefinitionName(loginUser, currentProjectName, processDefinitionName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (Status.SUCCESS.equals(status)) {
putMsg(result, Status.SUCCESS);
} else {
result.putAll(checkResult);
return false;
}
// get create process result
Map<String, Object> createProcessResult =
getCreateProcessResult(loginUser,
currentProjectName,
result,
processMeta,
processDefinitionName,
addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject));
if (createProcessResult == null) {
return false;
}
//create process definition
Integer processDefinitionId =
Objects.isNull(createProcessResult.get(Constants.DATA_LIST))
? null : Integer.parseInt(createProcessResult.get(Constants.DATA_LIST).toString());
//scheduler param
return getImportProcessScheduleResult(loginUser,
currentProjectName,
result,
processMeta,
processDefinitionName,
processDefinitionId);
}
/**
* get create process result
*/
private Map<String, Object> getCreateProcessResult(User loginUser,
String currentProjectName,
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
String importProcessParam) {
Map<String, Object> createProcessResult = null;
try {
createProcessResult = createProcessDefinition(loginUser
, currentProjectName,
processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp(),
importProcessParam,
processMeta.getProcessDefinitionDescription(),
processMeta.getProcessDefinitionLocations(),
processMeta.getProcessDefinitionConnects());
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("import process meta json data: {}", e.getMessage(), e);
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
}
return createProcessResult;
}
/**
* get import process schedule result
*/
private boolean getImportProcessScheduleResult(User loginUser,
String currentProjectName,
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
Integer processDefinitionId) {
if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) {
int scheduleInsert = importProcessSchedule(loginUser,
currentProjectName,
processMeta,
processDefinitionName,
processDefinitionId);
if (0 == scheduleInsert) {
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return false;
}
}
return true;
}
/**
* check required params
*/
private boolean checkImportanceParams(ProcessMeta processMeta, Map<String, Object> result) {
if (StringUtils.isEmpty(processMeta.getProjectName())) {
putMsg(result, Status.DATA_IS_NULL, "projectName");
return false;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
return false;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
return false;
}
return true;
}
/**
* import process add special task param
*
* @param loginUser login user
* @param processDefinitionJson process definition json
* @param targetProject target project
* @return import process param
*/
private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) {
ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.get(TASKS);
//add sql and dependent param
for (int i = 0; i < jsonArray.size(); i++) {
JsonNode taskNode = jsonArray.path(i);
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
if (null != addTaskParam) {
addTaskParam.addImportSpecialParam(taskNode);
}
}
//recursively correct sub-process parameters: map key is the old process id, value is the new process id
Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, jsonArray, subProcessIdMap);
}
jsonObject.set(TASKS, jsonArray);
return jsonObject.toString();
}
/**
* import process schedule
*
* @param loginUser login user
* @param currentProjectName current project name
* @param processMeta process meta data
* @param processDefinitionName process definition name
* @param processDefinitionId process definition id
* @return insert schedule flag
*/
public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta,
String processDefinitionName, Integer processDefinitionId) {
Date now = new Date();
Schedule scheduleObj = new Schedule();
scheduleObj.setProjectName(currentProjectName);
scheduleObj.setProcessDefinitionId(processDefinitionId);
scheduleObj.setProcessDefinitionName(processDefinitionName);
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setCrontab(processMeta.getScheduleCrontab());
if (null != processMeta.getScheduleStartTime()) {
scheduleObj.setStartTime(DateUtils.stringToDate(processMeta.getScheduleStartTime()));
}
if (null != processMeta.getScheduleEndTime()) {
scheduleObj.setEndTime(DateUtils.stringToDate(processMeta.getScheduleEndTime()));
}
if (null != processMeta.getScheduleWarningType()) {
scheduleObj.setWarningType(WarningType.valueOf(processMeta.getScheduleWarningType()));
}
if (null != processMeta.getScheduleWarningGroupId()) {
scheduleObj.setWarningGroupId(processMeta.getScheduleWarningGroupId());
}
if (null != processMeta.getScheduleFailureStrategy()) {
scheduleObj.setFailureStrategy(FailureStrategy.valueOf(processMeta.getScheduleFailureStrategy()));
}
if (null != processMeta.getScheduleReleaseState()) {
scheduleObj.setReleaseState(ReleaseState.valueOf(processMeta.getScheduleReleaseState()));
}
if (null != processMeta.getScheduleProcessInstancePriority()) {
scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority()));
}
if (null != processMeta.getScheduleWorkerGroupName()) {
scheduleObj.setWorkerGroup(processMeta.getScheduleWorkerGroupName());
}
return scheduleMapper.insert(scheduleObj);
}
/**
* check import process has sub process
* recursion create sub process
*
* @param loginUser login user
* @param targetProject target project
* @param jsonArray process task array
* @param subProcessIdMap correct sub process id map
*/
private void importSubProcess(User loginUser, Project targetProject, ArrayNode jsonArray, Map<Integer, Integer> subProcessIdMap) {
for (int i = 0; i < jsonArray.size(); i++) {
ObjectNode taskNode = (ObjectNode) jsonArray.path(i);
String taskType = taskNode.path("type").asText();
if (!checkTaskHasSubProcess(taskType)) {
continue;
}
//get sub process info
ObjectNode subParams = (ObjectNode) taskNode.path("params");
Integer subProcessId = subParams.path(PROCESSDEFINITIONID).asInt();
ProcessDefinition subProcess = processDefineMapper.queryByDefineId(subProcessId);
//check is sub process exist in db
if (null == subProcess) {
continue;
}
String subProcessJson = subProcess.getProcessDefinitionJson();
//check current project has sub process
ProcessDefinition currentProjectSubProcess = processDefineMapper.queryByDefineName(targetProject.getId(), subProcess.getName());
if (null == currentProjectSubProcess) {
ArrayNode subJsonArray = (ArrayNode) JSONUtils.parseObject(subProcess.getProcessDefinitionJson()).get(TASKS);
List<Object> subProcessList = StreamUtils.asStream(subJsonArray.elements())
.filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).path("type").asText()))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap);
//sub process processId correct
if (!subProcessIdMap.isEmpty()) {
for (Map.Entry<Integer, Integer> entry : subProcessIdMap.entrySet()) {
String oldSubProcessId = "\"processDefinitionId\":" + entry.getKey();
String newSubProcessId = "\"processDefinitionId\":" + entry.getValue();
subProcessJson = subProcessJson.replaceAll(oldSubProcessId, newSubProcessId);
}
subProcessIdMap.clear();
}
}
//nested sub-processes were handled above; now create the sub-process in the target project
Date now = new Date();
//create sub process in target project
ProcessDefinition processDefine = new ProcessDefinition();
processDefine.setName(subProcess.getName());
processDefine.setVersion(subProcess.getVersion());
processDefine.setReleaseState(subProcess.getReleaseState());
processDefine.setProjectId(targetProject.getId());
processDefine.setUserId(loginUser.getId());
processDefine.setProcessDefinitionJson(subProcessJson);
processDefine.setDescription(subProcess.getDescription());
processDefine.setLocations(subProcess.getLocations());
processDefine.setConnects(subProcess.getConnects());
processDefine.setTimeout(subProcess.getTimeout());
processDefine.setTenantId(subProcess.getTenantId());
processDefine.setGlobalParams(subProcess.getGlobalParams());
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(subProcess.getFlag());
processDefine.setWarningGroupId(subProcess.getWarningGroupId());
processDefineMapper.insert(processDefine);
logger.info("create sub process, project: {}, process name: {}", targetProject.getName(), processDefine.getName());
//modify task node
ProcessDefinition newSubProcessDefine = processDefineMapper.queryByDefineName(processDefine.getProjectId(), processDefine.getName());
if (null != newSubProcessDefine) {
subProcessIdMap.put(subProcessId, newSubProcessDefine.getId());
subParams.put(PROCESSDEFINITIONID, newSubProcessDefine.getId());
taskNode.set("params", subParams);
}
}
}
}
/**
* check whether the process definition nodes meet the specifications
*
* @param processData process data
* @param processDefinitionJson process definition json
* @return check result code
*/
@Override
public Map<String, Object> checkProcessNodeList(ProcessData processData, String processDefinitionJson) {
Map<String, Object> result = new HashMap<>();
try {
if (processData == null) {
logger.error("process data is null");
putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson);
return result;
}
// Check whether the task node is normal
List<TaskNode> taskNodes = processData.getTasks();
if (taskNodes == null) {
logger.error("process node info is empty");
putMsg(result, Status.DATA_IS_NULL, processDefinitionJson);
return result;
}
// check has cycle
if (graphHasCycle(taskNodes)) {
logger.error("process DAG has cycle");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
// check whether the process definition json is normal
for (TaskNode taskNode : taskNodes) {
if (!CheckUtils.checkTaskNodeParameters(taskNode.getParams(), taskNode.getType())) {
logger.error("task node {} parameter invalid", taskNode.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName());
return result;
}
// check extra params
CheckUtils.checkOtherParams(taskNode.getExtras());
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
/**
* get task node details based on process definition
*
* @param defineId define id
* @return task node list
*/
@Override
public Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineId);
return result;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson);
return result;
}
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks();
result.put(Constants.DATA_LIST, taskNodeList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get task node details based on process definition
*
* @param defineIdList define id list
* @return task node list
*/
@Override
public Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList) {
Map<String, Object> result = new HashMap<>();
Map<Integer, List<TaskNode>> taskNodeMap = new HashMap<>();
String[] idList = defineIdList.split(",");
List<Integer> idIntList = new ArrayList<>();
for (String definitionId : idList) {
idIntList.add(Integer.parseInt(definitionId));
}
Integer[] idArray = idIntList.toArray(new Integer[0]);
List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.info("process definition not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList);
return result;
}
for (ProcessDefinition processDefinition : processDefinitionList) {
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks();
taskNodeMap.put(processDefinition.getId(), taskNodeList);
}
result.put(Constants.DATA_LIST, taskNodeMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition all by project id
*
* @param projectId project id
* @return process definitions in the project
*/
@Override
public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
HashMap<String, Object> result = new HashMap<>(5);
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId);
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* Encapsulates the TreeView structure
*
* @param processId process definition id
* @param limit limit
* @return tree view json data
* @throws Exception exception
*/
@Override
public Map<String, Object> viewTree(Integer processId, Integer limit) throws Exception {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (null == processDefinition) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition);
return result;
}
DAG<String, TaskNode, TaskNodeRelation> dag = genDagGraph(processDefinition);
/**
* nodes that are running
*/
Map<String, List<TreeViewDto>> runningNodeMap = new ConcurrentHashMap<>();
/**
* nodes that are waiting to run
*/
Map<String, List<TreeViewDto>> waitingRunningNodeMap = new ConcurrentHashMap<>();
/**
* List of process instances
*/
List<ProcessInstance> processInstanceList = processInstanceService.queryByProcessDefineId(processId, limit);
for (ProcessInstance processInstance : processInstanceList) {
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
}
if (limit > processInstanceList.size()) {
limit = processInstanceList.size();
}
TreeViewDto parentTreeViewDto = new TreeViewDto();
parentTreeViewDto.setName("DAG");
parentTreeViewDto.setType("");
// Specify the process definition, because it is a TreeView for a process definition
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime();
parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString()
, processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime())));
}
List<TreeViewDto> parentTreeViewDtoList = new ArrayList<>();
parentTreeViewDtoList.add(parentTreeViewDto);
// Here is the encapsulation task instance
for (String startNode : dag.getBeginNode()) {
runningNodeMap.put(startNode, parentTreeViewDtoList);
}
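// breadth-first walk over the DAG: runningNodeMap holds the current level of
// nodes, waitingRunningNodeMap collects their successors for the next round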
while (Stopper.isRunning()) {
Set<String> postNodeList = null;
Iterator<Map.Entry<String, List<TreeViewDto>>> iter = runningNodeMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, List<TreeViewDto>> en = iter.next();
String nodeName = en.getKey();
parentTreeViewDtoList = en.getValue();
TreeViewDto treeViewDto = new TreeViewDto();
treeViewDto.setName(nodeName);
TaskNode taskNode = dag.getNode(nodeName);
treeViewDto.setType(taskNode.getType());
//set treeViewDto instances
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName);
if (taskInstance == null) {
treeViewDto.getInstances().add(new Instance(-1, "not running", "null"));
} else {
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
int subProcessId = 0;
/**
* if the task is a sub-process, record its sub-process definition id; otherwise sub id stays 0
*/
if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) {
String taskJson = taskInstance.getTaskJson();
taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
subProcessId = Integer.parseInt(JSONUtils.parseObject(
taskNode.getParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_ID).asText());
}
// use the null-safe start/end times computed above, consistent with the duration
treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString()
, startTime, endTime, taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId));
}
}
for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) {
pTreeViewDto.getChildren().add(treeViewDto);
}
postNodeList = dag.getSubsequentNodes(nodeName);
if (CollectionUtils.isNotEmpty(postNodeList)) {
for (String nextNodeName : postNodeList) {
List<TreeViewDto> treeViewDtoList = waitingRunningNodeMap.get(nextNodeName);
if (CollectionUtils.isEmpty(treeViewDtoList)) {
treeViewDtoList = new ArrayList<>();
}
treeViewDtoList.add(treeViewDto);
waitingRunningNodeMap.put(nextNodeName, treeViewDtoList);
}
}
runningNodeMap.remove(nodeName);
}
if (waitingRunningNodeMap.isEmpty()) {
break;
} else {
runningNodeMap.putAll(waitingRunningNodeMap);
waitingRunningNodeMap.clear();
}
}
result.put(Constants.DATA_LIST, parentTreeViewDto);
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
/**
* Generate the DAG Graph based on the process definition
*
* @param processDefinition process definition
* @return dag graph
*/
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDefinition processDefinition) {
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//check process data
if (null != processData) {
List<TaskNode> taskNodeList = processData.getTasks();
processDefinition.setGlobalParamList(processData.getGlobalParams());
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
// Generate concrete Dag to be executed
return DagHelper.buildDagGraph(processDag);
}
return new DAG<>();
}
/**
* whether the graph has a cycle
*
* @param taskNodeResponseList task node response list
* @return if graph has cycle flag
*/
private boolean graphHasCycle(List<TaskNode> taskNodeResponseList) {
DAG<String, TaskNode, String> graph = new DAG<>();
// Fill the vertices
for (TaskNode taskNodeResponse : taskNodeResponseList) {
graph.addNode(taskNodeResponse.getName(), taskNodeResponse);
}
// Fill edge relations
for (TaskNode taskNodeResponse : taskNodeResponseList) {
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class);
if (CollectionUtils.isNotEmpty(preTasks)) {
for (String preTask : preTasks) {
if (!graph.addEdge(preTask, taskNodeResponse.getName())) {
return true;
}
}
}
}
return graph.hasCycle();
}
private String recursionProcessDefinitionName(Integer projectId, String processDefinitionName, int num) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(projectId, processDefinitionName);
if (processDefinition != null) {
if (num > 1) {
String str = processDefinitionName.substring(0, processDefinitionName.length() - 3);
processDefinitionName = str + "(" + num + ")";
} else {
processDefinitionName = processDefinition.getName() + "(" + num + ")";
}
} else {
return processDefinitionName;
}
return recursionProcessDefinitionName(projectId, processDefinitionName, num + 1);
}
private Map<String, Object> copyProcessDefinition(User loginUser,
Integer processId,
Project targetProject) throws JsonProcessingException {
Map<String, Object> result = new HashMap<>(5);
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
return result;
} else {
return createProcessDefinition(
loginUser,
targetProject.getName(),
processDefinition.getName() + "_copy_" + DateUtils.getCurrentTimeStamp(),
processDefinition.getProcessDefinitionJson(),
processDefinition.getDescription(),
processDefinition.getLocations(),
processDefinition.getConnects());
}
}
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
*/
@Override
public Map<String, Object> batchCopyProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId) {
Map<String, Object> result = new HashMap<>();
List<String> failedProcessList = new ArrayList<>();
if (StringUtils.isEmpty(processDefinitionIds)) {
putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds);
return result;
}
//check src project auth
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, projectName);
if (checkResult != null) {
return checkResult;
}
Project targetProject = projectMapper.queryDetailById(targetProjectId);
if (targetProject == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId);
return result;
}
if (!targetProject.getName().equals(projectName)) {
Map<String, Object> checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName());
if (checkTargetProjectResult != null) {
return checkTargetProjectResult;
}
}
String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA);
doBatchCopyProcessDefinition(loginUser, targetProject, failedProcessList, processDefinitionIdList);
checkBatchOperateResult(projectName, targetProject.getName(), result, failedProcessList, true);
return result;
}
/**
* batch move process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
*/
@Override
public Map<String, Object> batchMoveProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId) {
Map<String, Object> result = new HashMap<>();
List<String> failedProcessList = new ArrayList<>();
//check src project auth
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, projectName);
if (checkResult != null) {
return checkResult;
}
if (StringUtils.isEmpty(processDefinitionIds)) {
putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds);
return result;
}
Project targetProject = projectMapper.queryDetailById(targetProjectId);
if (targetProject == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId);
return result;
}
if (!targetProject.getName().equals(projectName)) {
Map<String, Object> checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName());
if (checkTargetProjectResult != null) {
return checkTargetProjectResult;
}
}
String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA);
doBatchMoveProcessDefinition(targetProject, failedProcessList, processDefinitionIdList);
checkBatchOperateResult(projectName, targetProject.getName(), result, failedProcessList, false);
return result;
}
/**
* switch the process definition to a specified version
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param version the version user want to switch
* @return switch process definition version result code
*/
@Override
public Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
, int processDefinitionId, long version) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (Objects.isNull(processDefinition)) {
putMsg(result
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR
, processDefinitionId);
return result;
}
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
if (Objects.isNull(processDefinitionVersion)) {
putMsg(result
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR
, processDefinitionId
, version);
return result;
}
processDefinition.setVersion(processDefinitionVersion.getVersion());
processDefinition.setProcessDefinitionJson(processDefinitionVersion.getProcessDefinitionJson());
processDefinition.setDescription(processDefinitionVersion.getDescription());
processDefinition.setLocations(processDefinitionVersion.getLocations());
processDefinition.setConnects(processDefinitionVersion.getConnects());
processDefinition.setTimeout(processDefinitionVersion.getTimeout());
processDefinition.setGlobalParams(processDefinitionVersion.getGlobalParams());
processDefinition.setUpdateTime(new Date());
processDefinition.setWarningGroupId(processDefinitionVersion.getWarningGroupId());
processDefinition.setResourceIds(processDefinitionVersion.getResourceIds());
if (processDefineMapper.updateById(processDefinition) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
}
return result;
}
/**
* do batch move process definition
*
* @param targetProject targetProject
* @param failedProcessList failedProcessList
* @param processDefinitionIdList processDefinitionIdList
*/
private void doBatchMoveProcessDefinition(Project targetProject, List<String> failedProcessList, String[] processDefinitionIdList) {
for (String processDefinitionId : processDefinitionIdList) {
try {
Map<String, Object> moveProcessDefinitionResult =
moveProcessDefinition(Integer.valueOf(processDefinitionId), targetProject);
if (!Status.SUCCESS.equals(moveProcessDefinitionResult.get(Constants.STATUS))) {
setFailedProcessList(failedProcessList, processDefinitionId);
logger.error((String) moveProcessDefinitionResult.get(Constants.MSG));
}
} catch (Exception e) {
setFailedProcessList(failedProcessList, processDefinitionId);
}
}
}
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param targetProject targetProject
* @param failedProcessList failedProcessList
* @param processDefinitionIdList processDefinitionIdList
*/
private void doBatchCopyProcessDefinition(User loginUser, Project targetProject, List<String> failedProcessList, String[] processDefinitionIdList) {
for (String processDefinitionId : processDefinitionIdList) {
try {
Map<String, Object> copyProcessDefinitionResult =
copyProcessDefinition(loginUser, Integer.valueOf(processDefinitionId), targetProject);
if (!Status.SUCCESS.equals(copyProcessDefinitionResult.get(Constants.STATUS))) {
setFailedProcessList(failedProcessList, processDefinitionId);
logger.error((String) copyProcessDefinitionResult.get(Constants.MSG));
}
} catch (Exception e) {
setFailedProcessList(failedProcessList, processDefinitionId);
}
}
}
/**
* set failed processList
*
* @param failedProcessList failedProcessList
* @param processDefinitionId processDefinitionId
*/
private void setFailedProcessList(List<String> failedProcessList, String processDefinitionId) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(Integer.valueOf(processDefinitionId));
if (processDefinition != null) {
failedProcessList.add(processDefinitionId + "[" + processDefinition.getName() + "]");
} else {
failedProcessList.add(processDefinitionId + "[null]");
}
}
/**
* check project and auth
*
* @param loginUser loginUser
* @param projectName projectName
*/
private Map<String, Object> checkProjectAndAuth(User loginUser, String projectName) {
Project project = projectMapper.queryByName(projectName);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
return null;
}
/**
* move process definition
*
* @param processId processId
* @param targetProject targetProject
* @return move result code
*/
private Map<String, Object> moveProcessDefinition(Integer processId,
Project targetProject) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
return result;
}
processDefinition.setProjectId(targetProject.getId());
processDefinition.setUpdateTime(new Date());
if (processDefineMapper.updateById(processDefinition) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
return result;
}
/**
* check batch operate result
*
* @param srcProjectName srcProjectName
* @param targetProjectName targetProjectName
* @param result result
* @param failedProcessList failedProcessList
* @param isCopy isCopy
*/
private void checkBatchOperateResult(String srcProjectName, String targetProjectName,
Map<String, Object> result, List<String> failedProcessList, boolean isCopy) {
if (!failedProcessList.isEmpty()) {
if (isCopy) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName, String.join(",", failedProcessList));
} else {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName, String.join(",", failedProcessList));
}
} else {
putMsg(result, Status.SUCCESS);
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
The Process Definition interface parameter releaseState should be changed to an enumeration type
**What are the current deficiencies and the benefits of improvement**
- The current Process Definition interface parameter releaseState variable uses an int type, and the releaseState mapping relationship needs to be recorded in both the front-end and back-end code
**Which version of DolphinScheduler:**
- [dev]
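A minimal sketch of what the controller signature could look like after this change (the annotations and the `returnDataList` helper mirror the existing controller style, but the exact names here are assumptions, not the final implementation):

```java
// hypothetical controller method: Spring converts the request value
// ("ONLINE"/"OFFLINE") directly into the ReleaseState enum, so the int
// mapping no longer has to be duplicated in front-end and back-end code
@PostMapping(value = "/release")
public Result releaseProcessDefinition(@RequestAttribute(value = Constants.SESSION_USER) User loginUser,
                                       @PathVariable String projectName,
                                       @RequestParam(value = "processId") int processId,
                                       @RequestParam(value = "releaseState") ReleaseState releaseState) {
    // the service still takes an int today, so the enum is unwrapped at the boundary
    Map<String, Object> result = processDefinitionService.releaseProcessDefinition(
            loginUser, projectName, processId, releaseState.ordinal());
    return returnDataList(result);
}
```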
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/controller/ProcessDefinitionControllerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.mock.web.MockHttpServletResponse;
/**
* process definition controller test
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class ProcessDefinitionControllerTest {
private static Logger logger = LoggerFactory.getLogger(ProcessDefinitionControllerTest.class);
@InjectMocks
private ProcessDefinitionController processDefinitionController;
@Mock
private ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private ProcessDefinitionVersionService processDefinitionVersionService;
protected User user;
@Before
public void before() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
loginUser.setUserName("admin");
user = loginUser;
}
@Test
public void testCreateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\""
+ ":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\"
+ "necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\""
+ ",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, 1);
Mockito.when(processDefinitionService.createProcessDefinition(user, projectName, name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.createProcessDefinition(user, projectName, name, json,
locations, connects, description);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
@Test
public void testVerifyProcessDefinitionName() throws Exception {
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR);
String projectName = "test";
String name = "dag_test";
Mockito.when(processDefinitionService.verifyProcessDefinitionName(user, projectName, name)).thenReturn(result);
Result response = processDefinitionController.verifyProcessDefinitionName(user, projectName, name);
Assert.assertEquals(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR.getCode(), response.getCode().intValue());
}
@Test
public void updateProcessDefinition() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\""
+ ",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"}"
+ ",\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
int releaseState = 0;
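// 0 is ReleaseState.OFFLINE.ordinal(); issue #4493 proposes passing the enum itself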
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put("processDefinitionId", 1);
Mockito.when(processDefinitionService.updateProcessDefinition(user, projectName, id, name, json,
description, locations, connects)).thenReturn(result);
Result response = processDefinitionController.updateProcessDefinition(user, projectName, name, id, json,
locations, connects, description, releaseState);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testReleaseProcessDefinition() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal())).thenReturn(result);
Result response = processDefinitionController.releaseProcessDefinition(user, projectName, id, ReleaseState.OFFLINE.ordinal());
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionById() throws Exception {
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\""
+ ":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":"
+ "\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectName(projectName);
processDefinition.setConnects(connects);
processDefinition.setDescription(description);
processDefinition.setId(id);
processDefinition.setLocations(locations);
processDefinition.setName(name);
processDefinition.setProcessDefinitionJson(json);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefinition);
Mockito.when(processDefinitionService.queryProcessDefinitionById(user, projectName, id)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionById(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testBatchCopyProcessDefinition() throws Exception {
String projectName = "test";
int targetProjectId = 2;
String id = "1";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.batchCopyProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
Result response = processDefinitionController.copyProcessDefinition(user, projectName, id, targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testBatchMoveProcessDefinition() throws Exception {
String projectName = "test";
int targetProjectId = 2;
String id = "1";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.batchMoveProcessDefinition(user, projectName, id, targetProjectId)).thenReturn(result);
Result response = processDefinitionController.moveProcessDefinition(user, projectName, id, targetProjectId);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionList() throws Exception {
String projectName = "test";
List<ProcessDefinition> resourceList = getDefinitionList();
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, resourceList);
Mockito.when(processDefinitionService.queryProcessDefinitionList(user, projectName)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionList(user, projectName);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
public List<ProcessDefinition> getDefinitionList() {
List<ProcessDefinition> resourceList = new ArrayList<>();
String json = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1"
+ "\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}"
+ "\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval"
+ "\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\""
+ ":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}";
String locations = "{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}";
String projectName = "test";
String name = "dag_test";
String description = "desc test";
String connects = "[]";
int id = 1;
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProjectName(projectName);
processDefinition.setConnects(connects);
processDefinition.setDescription(description);
processDefinition.setId(id);
processDefinition.setLocations(locations);
processDefinition.setName(name);
processDefinition.setProcessDefinitionJson(json);
String name2 = "dag_test";
int id2 = 2;
ProcessDefinition processDefinition2 = new ProcessDefinition();
processDefinition2.setProjectName(projectName);
processDefinition2.setConnects(connects);
processDefinition2.setDescription(description);
processDefinition2.setId(id2);
processDefinition2.setLocations(locations);
processDefinition2.setName(name2);
processDefinition2.setProcessDefinitionJson(json);
resourceList.add(processDefinition);
resourceList.add(processDefinition2);
return resourceList;
}
@Test
public void testDeleteProcessDefinitionById() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.deleteProcessDefinitionById(user, projectName, id)).thenReturn(result);
Result response = processDefinitionController.deleteProcessDefinitionById(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testGetNodeListByDefinitionId() throws Exception {
String projectName = "test";
int id = 1;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionId(id)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionId(user, projectName, id);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testGetNodeListByDefinitionIdList() throws Exception {
String projectName = "test";
String idList = "1,2,3";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.getTaskNodeListByDefinitionIdList(idList)).thenReturn(result);
Result response = processDefinitionController.getNodeListByDefinitionIdList(user, projectName, idList);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionAllByProjectId() throws Exception {
int projectId = 1;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.queryProcessDefinitionAllByProjectId(projectId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionAllByProjectId(user, projectId);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testViewTree() throws Exception {
String projectName = "test";
int processId = 1;
int limit = 2;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.viewTree(processId, limit)).thenReturn(result);
Result response = processDefinitionController.viewTree(user, projectName, processId, limit);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testQueryProcessDefinitionListPaging() throws Exception {
String projectName = "test";
int pageNo = 1;
int pageSize = 10;
String searchVal = "";
int userId = 1;
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, new PageInfo<Resource>(1, 10));
Mockito.when(processDefinitionService.queryProcessDefinitionListPaging(user, projectName, searchVal, pageNo, pageSize, userId)).thenReturn(result);
Result response = processDefinitionController.queryProcessDefinitionListPaging(user, projectName, pageNo, searchVal, userId, pageSize);
Assert.assertEquals(Status.SUCCESS.getCode(), response.getCode().intValue());
}
@Test
public void testBatchExportProcessDefinitionByIds() throws Exception {
String processDefinitionIds = "1,2";
String projectName = "test";
HttpServletResponse response = new MockHttpServletResponse();
Mockito.doNothing().when(this.processDefinitionService).batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
processDefinitionController.batchExportProcessDefinitionByIds(user, projectName, processDefinitionIds, response);
}
@Test
public void testQueryProcessDefinitionVersions() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
resultMap.put(Constants.DATA_LIST, new PageInfo<ProcessDefinitionVersion>(1, 10));
Mockito.when(processDefinitionVersionService.queryProcessDefinitionVersions(
user
, projectName
, 1
, 10
, 1))
.thenReturn(resultMap);
Result result = processDefinitionController.queryProcessDefinitionVersions(
user
, projectName
, 1
, 10
, 1);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void testSwitchProcessDefinitionVersion() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
Mockito.when(processDefinitionService.switchProcessDefinitionVersion(
user
, projectName
, 1
, 10))
.thenReturn(resultMap);
Result result = processDefinitionController.switchProcessDefinitionVersion(
user
, projectName
, 1
, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
@Test
public void testDeleteProcessDefinitionVersion() {
String projectName = "test";
Map<String, Object> resultMap = new HashMap<>();
putMsg(resultMap, Status.SUCCESS);
Mockito.when(processDefinitionVersionService.deleteByProcessDefinitionIdAndVersion(
user
, projectName
, 1
, 10))
.thenReturn(resultMap);
Result result = processDefinitionController.deleteProcessDefinitionVersion(
user
, projectName
, 1
, 10);
Assert.assertEquals(Status.SUCCESS.getCode(), (int) result.getCode());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
The Process Definition interface parameter releaseState should be changed to an enumeration type
**What are the current deficiencies and the benefits of improvement**
- The current Process Definition interface parameter releaseState variable uses an int type, and the releaseState mapping relationship needs to be recorded in both the front-end and back-end code
**Which version of DolphinScheduler:**
- [dev]
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.assertj.core.api.Assertions.assertThat;
import static org.powermock.api.mockito.PowerMockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProcessDefinitionServiceImpl;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.shell.ShellParameters;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.http.entity.ContentType;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.util.ReflectionUtils;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class ProcessDefinitionServiceTest {
private static final String SHELL_JSON = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9527\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
private static final String CYCLE_SHELL_JSON = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9527\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"tasks-9529\"\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9528\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"tasks-9527\"\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9529\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"tasks-9528\"\n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
@InjectMocks
private ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private ProcessDefinitionMapper processDefineMapper;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectServiceImpl projectService;
@Mock
private ScheduleMapper scheduleMapper;
@Mock
private ProcessService processService;
@Mock
private ProcessInstanceService processInstanceService;
@Mock
private TaskInstanceMapper taskInstanceMapper;
@Mock
private ProcessDefinitionVersionService processDefinitionVersionService;
@Test
public void testQueryProcessDefinitionList() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionList(loginUser, "project_test1");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
List<ProcessDefinition> resourceList = new ArrayList<>();
resourceList.add(getProcessDefinition());
Mockito.when(processDefineMapper.queryAllDefinitionList(project.getId())).thenReturn(resourceList);
Map<String, Object> checkSuccessRes = processDefinitionService.queryProcessDefinitionList(loginUser, "project_test1");
Assert.assertEquals(Status.SUCCESS, checkSuccessRes.get(Constants.STATUS));
}
@Test
@SuppressWarnings("unchecked")
public void testQueryProcessDefinitionListPaging() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project not found
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "", 1, 5, 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
putMsg(result, Status.SUCCESS, projectName);
loginUser.setId(1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Page<ProcessDefinition> page = new Page<>(1, 10);
page.setTotal(30);
Mockito.when(processDefineMapper.queryDefineListPaging(
Mockito.any(IPage.class)
, Mockito.eq("")
, Mockito.eq(loginUser.getId())
, Mockito.eq(project.getId())
, Mockito.anyBoolean())).thenReturn(page);
Map<String, Object> map1 = processDefinitionService.queryProcessDefinitionListPaging(
loginUser, projectName, "", 1, 10, loginUser.getId());
Assert.assertEquals(Status.SUCCESS, map1.get(Constants.STATUS));
}
@Test
public void testQueryProcessDefinitionById() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project check auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 1);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
        //project check auth success, instance does not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processDefineMapper.selectById(1)).thenReturn(null);
        Map<String, Object> instanceNotExistRes = processDefinitionService.queryProcessDefinitionById(loginUser,
                "project_test1", 1);
        Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotExistRes.get(Constants.STATUS));
        //instance exists
Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition());
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testQueryProcessDefinitionByName() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
//project check auth fail
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.queryProcessDefinitionByName(loginUser,
"project_test1", "test_def");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
        //project check auth success, instance does not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test_def")).thenReturn(null);
        Map<String, Object> instanceNotExistRes = processDefinitionService.queryProcessDefinitionByName(loginUser,
                "project_test1", "test_def");
        Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotExistRes.get(Constants.STATUS));
        //instance exists
Mockito.when(processDefineMapper.queryByDefineName(project.getId(),"test")).thenReturn(getProcessDefinition());
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionByName(loginUser,
"project_test1", "test");
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testBatchCopyProcessDefinition() {
String projectName = "project_test1";
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
// copy project definition ids empty test
Map<String, Object> map = processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, StringUtils.EMPTY, 0);
Assert.assertEquals(Status.PROCESS_DEFINITION_IDS_IS_EMPTY, map.get(Constants.STATUS));
Map<String, Object> result = new HashMap<>();
// project check auth fail
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map1 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectName, String.valueOf(project.getId()), 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map1.get(Constants.STATUS));
// project check auth success, target project is null
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(projectMapper.queryDetailById(0)).thenReturn(null);
Map<String, Object> map2 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectName, String.valueOf(project.getId()), 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map2.get(Constants.STATUS));
// project check auth success, target project name not equal project name, check auth target project fail
Project project1 = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
putMsg(result, Status.SUCCESS, projectName);
String projectName2 = "project_test2";
Project project2 = getProject(projectName2);
Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(project2);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project2, projectName2)).thenReturn(result);
Mockito.when(projectMapper.queryDetailById(1)).thenReturn(project2);
        // instance exists
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\","
+ "\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234"
+ "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\","
+ "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
Map<String, Object> map3 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectName, "46", 1);
Assert.assertEquals(Status.SUCCESS, map3.get(Constants.STATUS));
}
@Test
public void testBatchMoveProcessDefinition() {
String projectName = "project_test1";
Project project1 = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project1);
String projectName2 = "project_test2";
Project project2 = getProject(projectName2);
Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(project2);
int targetProjectId = 2;
Mockito.when(projectMapper.queryDetailById(targetProjectId)).thenReturn(getProjectById(targetProjectId));
Project project = getProject(projectName);
Project targetProject = getProjectById(targetProjectId);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, projectName);
Map<String, Object> result2 = new HashMap<>();
putMsg(result2, Status.SUCCESS, targetProject.getName());
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project2, projectName2)).thenReturn(result);
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\""
+ ",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234"
+ "\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\","
+ "\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},"
+ "\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
// check target project result == null
Mockito.when(processDefineMapper.updateById(definition)).thenReturn(46);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
putMsg(result, Status.SUCCESS);
Map<String, Object> successRes = processDefinitionService.batchMoveProcessDefinition(
loginUser, "project_test1", "46", 2);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void deleteProcessDefinitionByIdTest() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.deleteProcessDefinitionById(loginUser, "project_test1", 6);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
        //project check auth success, instance does not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processDefineMapper.selectById(1)).thenReturn(null);
        Map<String, Object> instanceNotExistRes = processDefinitionService.deleteProcessDefinitionById(loginUser,
                "project_test1", 1);
        Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, instanceNotExistRes.get(Constants.STATUS));
ProcessDefinition processDefinition = getProcessDefinition();
//user no auth
loginUser.setUserType(UserType.GENERAL_USER);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
Map<String, Object> userNoAuthRes = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, userNoAuthRes.get(Constants.STATUS));
//process definition online
loginUser.setUserType(UserType.ADMIN_USER);
processDefinition.setReleaseState(ReleaseState.ONLINE);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
Map<String, Object> dfOnlineRes = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.PROCESS_DEFINE_STATE_ONLINE, dfOnlineRes.get(Constants.STATUS));
//scheduler list elements > 1
processDefinition.setReleaseState(ReleaseState.OFFLINE);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
List<Schedule> schedules = new ArrayList<>();
schedules.add(getSchedule());
schedules.add(getSchedule());
Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(schedules);
Map<String, Object> schedulerGreaterThanOneRes = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR, schedulerGreaterThanOneRes.get(Constants.STATUS));
//scheduler online
schedules.clear();
Schedule schedule = getSchedule();
schedule.setReleaseState(ReleaseState.ONLINE);
schedules.add(schedule);
Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(schedules);
Map<String, Object> schedulerOnlineRes = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.SCHEDULE_CRON_STATE_ONLINE, schedulerOnlineRes.get(Constants.STATUS));
//delete fail
schedules.clear();
schedule.setReleaseState(ReleaseState.OFFLINE);
schedules.add(schedule);
Mockito.when(scheduleMapper.queryByProcessDefinitionId(46)).thenReturn(schedules);
Mockito.when(processDefineMapper.deleteById(46)).thenReturn(0);
Map<String, Object> deleteFail = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR, deleteFail.get(Constants.STATUS));
//delete success
Mockito.when(processDefineMapper.deleteById(46)).thenReturn(1);
Map<String, Object> deleteSuccess = processDefinitionService.deleteProcessDefinitionById(loginUser,
"project_test1", 46);
Assert.assertEquals(Status.SUCCESS, deleteSuccess.get(Constants.STATUS));
}
@Test
public void testReleaseProcessDefinition() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
6, ReleaseState.OFFLINE.getCode());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
        // project check auth success, process definition goes online
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition());
Map<String, Object> onlineRes = processDefinitionService.releaseProcessDefinition(
loginUser, "project_test1", 46, ReleaseState.ONLINE.getCode());
Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS));
        // project check auth success, process definition with resource ids goes online
ProcessDefinition processDefinition1 = getProcessDefinition();
processDefinition1.setResourceIds("1,2");
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition1);
Mockito.when(processService.getUserById(1)).thenReturn(loginUser);
Map<String, Object> onlineWithResourceRes = processDefinitionService.releaseProcessDefinition(
loginUser, "project_test1", 46, ReleaseState.ONLINE.getCode());
Assert.assertEquals(Status.SUCCESS, onlineWithResourceRes.get(Constants.STATUS));
// release error code
Map<String, Object> failRes = processDefinitionService.releaseProcessDefinition(
loginUser, "project_test1", 46, 2);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failRes.get(Constants.STATUS));
        //FIXME: this function exits with code 1 when an exception occurs
//process definition offline
// List<Schedule> schedules = new ArrayList<>();
// Schedule schedule = getSchedule();
// schedules.add(schedule);
// Mockito.when(scheduleMapper.selectAllByProcessDefineArray(new int[]{46})).thenReturn(schedules);
// Mockito.when(scheduleMapper.updateById(schedule)).thenReturn(1);
// Map<String, Object> offlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
// 46, ReleaseState.OFFLINE.getCode());
// Assert.assertEquals(Status.SUCCESS, offlineRes.get(Constants.STATUS));
}
@Test
public void testVerifyProcessDefinitionName() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
        //project check auth success, process does not exist
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(processDefineMapper.verifyByDefineName(project.getId(), "test_pdf")).thenReturn(null);
Map<String, Object> processNotExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.SUCCESS, processNotExistRes.get(Constants.STATUS));
        //process exists
Mockito.when(processDefineMapper.verifyByDefineName(project.getId(), "test_pdf")).thenReturn(getProcessDefinition());
Map<String, Object> processExistRes = processDefinitionService.verifyProcessDefinitionName(loginUser,
"project_test1", "test_pdf");
Assert.assertEquals(Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, processExistRes.get(Constants.STATUS));
}
@Test
public void testCheckProcessNodeList() {
Map<String, Object> dataNotValidRes = processDefinitionService.checkProcessNodeList(null, "");
Assert.assertEquals(Status.DATA_IS_NOT_VALID, dataNotValidRes.get(Constants.STATUS));
// task not empty
String processDefinitionJson = SHELL_JSON;
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
Assert.assertNotNull(processData);
        Map<String, Object> taskNotEmptyRes = processDefinitionService.checkProcessNodeList(processData, processDefinitionJson);
        Assert.assertEquals(Status.SUCCESS, taskNotEmptyRes.get(Constants.STATUS));
// task empty
processData.setTasks(null);
        Map<String, Object> taskEmptyRes = processDefinitionService.checkProcessNodeList(processData, processDefinitionJson);
        Assert.assertEquals(Status.DATA_IS_NULL, taskEmptyRes.get(Constants.STATUS));
// task cycle
String processDefinitionJsonCycle = CYCLE_SHELL_JSON;
ProcessData processDataCycle = JSONUtils.parseObject(processDefinitionJsonCycle, ProcessData.class);
Map<String, Object> taskCycleRes = processDefinitionService.checkProcessNodeList(processDataCycle, processDefinitionJsonCycle);
Assert.assertEquals(Status.PROCESS_NODE_HAS_CYCLE, taskCycleRes.get(Constants.STATUS));
//json abnormal
String abnormalJson = processDefinitionJson.replaceAll("SHELL", "");
processData = JSONUtils.parseObject(abnormalJson, ProcessData.class);
Map<String, Object> abnormalTaskRes = processDefinitionService.checkProcessNodeList(processData, abnormalJson);
Assert.assertEquals(Status.PROCESS_NODE_S_PARAMETER_INVALID, abnormalTaskRes.get(Constants.STATUS));
}
@Test
public void testGetTaskNodeListByDefinitionId() {
        //process definition does not exist
Mockito.when(processDefineMapper.selectById(46)).thenReturn(null);
Map<String, Object> processDefinitionNullRes = processDefinitionService.getTaskNodeListByDefinitionId(46);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS));
//process data null
ProcessDefinition processDefinition = getProcessDefinition();
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
        Map<String, Object> dataNotValidRes = processDefinitionService.getTaskNodeListByDefinitionId(46);
        Assert.assertEquals(Status.DATA_IS_NOT_VALID, dataNotValidRes.get(Constants.STATUS));
//success
processDefinition.setProcessDefinitionJson(SHELL_JSON);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
        Map<String, Object> successRes = processDefinitionService.getTaskNodeListByDefinitionId(46);
        Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testGetTaskNodeListByDefinitionIdList() {
        //process definition does not exist
String defineIdList = "46";
Integer[] idArray = {46};
Mockito.when(processDefineMapper.queryDefinitionListByIdList(idArray)).thenReturn(null);
Map<String, Object> processNotExistRes = processDefinitionService.getTaskNodeListByDefinitionIdList(defineIdList);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processNotExistRes.get(Constants.STATUS));
        //process definition exists
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(SHELL_JSON);
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryDefinitionListByIdList(idArray)).thenReturn(processDefinitionList);
Map<String, Object> successRes = processDefinitionService.getTaskNodeListByDefinitionIdList(defineIdList);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testQueryProcessDefinitionAllByProjectId() {
int projectId = 1;
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(SHELL_JSON);
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryAllDefinitionList(projectId)).thenReturn(processDefinitionList);
Map<String, Object> successRes = processDefinitionService.queryProcessDefinitionAllByProjectId(projectId);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testViewTree() throws Exception {
        //process definition does not exist
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(SHELL_JSON);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(null);
Map<String, Object> processDefinitionNullRes = processDefinitionService.viewTree(46, 10);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS));
List<ProcessInstance> processInstanceList = new ArrayList<>();
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(1);
processInstance.setName("test_instance");
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
processInstance.setHost("192.168.xx.xx");
processInstance.setStartTime(new Date());
processInstance.setEndTime(new Date());
processInstanceList.add(processInstance);
TaskInstance taskInstance = new TaskInstance();
taskInstance.setStartTime(new Date());
taskInstance.setEndTime(new Date());
taskInstance.setTaskType("SHELL");
taskInstance.setId(1);
taskInstance.setName("test_task_instance");
taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
taskInstance.setHost("192.168.xx.xx");
        //task instance does not exist
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
Mockito.when(processInstanceService.queryByProcessDefineId(46, 10)).thenReturn(processInstanceList);
Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(null);
Map<String, Object> taskNullRes = processDefinitionService.viewTree(46, 10);
Assert.assertEquals(Status.SUCCESS, taskNullRes.get(Constants.STATUS));
        //task instance exists
Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(taskInstance);
Map<String, Object> taskNotNuLLRes = processDefinitionService.viewTree(46, 10);
Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS));
}
@Test
public void testSubProcessViewTree() throws Exception {
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(SHELL_JSON);
List<ProcessInstance> processInstanceList = new ArrayList<>();
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(1);
processInstance.setName("test_instance");
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
processInstance.setHost("192.168.xx.xx");
processInstance.setStartTime(new Date());
processInstance.setEndTime(new Date());
processInstanceList.add(processInstance);
TaskInstance taskInstance = new TaskInstance();
taskInstance.setStartTime(new Date());
taskInstance.setEndTime(new Date());
taskInstance.setTaskType("SUB_PROCESS");
taskInstance.setId(1);
taskInstance.setName("test_task_instance");
taskInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
taskInstance.setHost("192.168.xx.xx");
taskInstance.setTaskJson("{\n"
+ " \"conditionResult\": {\n"
+ " \"failedNode\": [\n"
+ " \"\"\n"
+ " ],\n"
+ " \"successNode\": [\n"
+ " \"\"\n"
+ " ]\n"
+ " },\n"
+ " \"delayTime\": \"0\",\n"
+ " \"dependence\": {},\n"
+ " \"description\": \"\",\n"
+ " \"id\": \"1\",\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"name\": \"test_task_instance\",\n"
+ " \"params\": {\n"
+ " \"processDefinitionId\": \"222\",\n"
+ " \"resourceList\": []\n"
+ " },\n"
+ " \"preTasks\": [],\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"timeout\": {\n"
+ " \"enable\": false,\n"
+ " \"interval\": null,\n"
+ " \"strategy\": \"\"\n"
+ " },\n"
+ " \"type\": \"SUB_PROCESS\",\n"
+ " \"workerGroup\": \"default\"\n"
+ "}");
        //task instance exists
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
Mockito.when(processInstanceService.queryByProcessDefineId(46, 10)).thenReturn(processInstanceList);
Mockito.when(taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), "shell-1")).thenReturn(taskInstance);
Map<String, Object> taskNotNuLLRes = processDefinitionService.viewTree(46, 10);
Assert.assertEquals(Status.SUCCESS, taskNotNuLLRes.get(Constants.STATUS));
}
@Test
public void testImportProcessDefinitionById() throws IOException {
String processJson = "[\n"
+ " {\n"
+ " \"projectName\": \"testProject\",\n"
+ " \"processDefinitionName\": \"shell-4\",\n"
+ " \"processDefinitionJson\": \"{\\\"tenantId\\\":1"
+ ",\\\"globalParams\\\":[],\\\"tasks\\\":[{\\\"workerGroupId\\\":\\\"3\\\",\\\"description\\\""
+ ":\\\"\\\",\\\"runFlag\\\":\\\"NORMAL\\\",\\\"type\\\":\\\"SHELL\\\",\\\"params\\\":{\\\"rawScript\\\""
+ ":\\\"#!/bin/bash\\\\necho \\\\\\\"shell-4\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}"
+ ",\\\"timeout\\\":{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"},\\\"maxRetryTimes\\\":\\\"0\\\""
+ ",\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-4\\\",\\\"dependence\\\":{}"
+ ",\\\"retryInterval\\\":\\\"1\\\",\\\"preTasks\\\":[],\\\"id\\\":\\\"tasks-84090\\\"}"
+ ",{\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-5\\\",\\\"workerGroupId\\\""
+ ":\\\"3\\\",\\\"description\\\":\\\"\\\",\\\"dependence\\\":{},\\\"preTasks\\\":[\\\"shell-4\\\"]"
+ ",\\\"id\\\":\\\"tasks-87364\\\",\\\"runFlag\\\":\\\"NORMAL\\\",\\\"type\\\":\\\"SUB_PROCESS\\\""
+ ",\\\"params\\\":{\\\"processDefinitionId\\\":46},\\\"timeout\\\":{\\\"enable\\\":false"
+ ",\\\"strategy\\\":\\\"\\\"}}],\\\"timeout\\\":0}\",\n"
+ " \"processDefinitionDescription\": \"\",\n"
+ " \"processDefinitionLocations\": \"{\\\"tasks-84090\\\":{\\\"name\\\":\\\"shell-4\\\""
+ ",\\\"targetarr\\\":\\\"\\\",\\\"x\\\":128,\\\"y\\\":114},\\\"tasks-87364\\\":{\\\"name\\\""
+ ":\\\"shell-5\\\",\\\"targetarr\\\":\\\"tasks-84090\\\",\\\"x\\\":266,\\\"y\\\":115}}\",\n"
+ " \"processDefinitionConnects\": \"[{\\\"endPointSourceId\\\":\\\"tasks-84090\\\""
+ ",\\\"endPointTargetId\\\":\\\"tasks-87364\\\"}]\"\n"
+ " }\n"
+ "]";
String subProcessJson = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-52423\",\n"
+ " \"name\": \"shell-5\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"echo \\\"shell-5\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": null,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": \"3\",\n"
+ " \"preTasks\": [\n"
+ " \n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
FileUtils.writeStringToFile(new File("/tmp/task.json"), processJson);
File file = new File("/tmp/task.json");
FileInputStream fileInputStream = new FileInputStream("/tmp/task.json");
MultipartFile multipartFile = new MockMultipartFile(file.getName(), file.getName(),
ContentType.APPLICATION_OCTET_STREAM.toString(), fileInputStream);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
String currentProjectName = "testProject";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS, currentProjectName);
ProcessDefinition shellDefinition2 = new ProcessDefinition();
shellDefinition2.setId(46);
shellDefinition2.setName("shell-5");
shellDefinition2.setProjectId(2);
shellDefinition2.setProcessDefinitionJson(subProcessJson);
Mockito.when(projectMapper.queryByName(currentProjectName)).thenReturn(getProject(currentProjectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, getProject(currentProjectName), currentProjectName)).thenReturn(result);
Mockito.when(processDefineMapper.queryByDefineId(46)).thenReturn(shellDefinition2);
Map<String, Object> importProcessResult = processDefinitionService.importProcessDefinition(loginUser, multipartFile, currentProjectName);
Assert.assertEquals(Status.SUCCESS, importProcessResult.get(Constants.STATUS));
boolean delete = file.delete();
Assert.assertTrue(delete);
}
@Test
public void testUpdateProcessDefinition() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.SUCCESS);
String projectName = "project_test1";
Project project = getProject(projectName);
ProcessDefinition processDefinition = getProcessDefinition();
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processService.findProcessDefineById(1)).thenReturn(processDefinition);
Mockito.when(processDefinitionVersionService.addProcessDefinitionVersion(processDefinition)).thenReturn(1L);
String sqlDependentJson = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SQL\",\n"
+ " \"id\": \"tasks-27297\",\n"
+ " \"name\": \"sql\",\n"
+ " \"params\": {\n"
+ " \"type\": \"MYSQL\",\n"
+ " \"datasource\": 1,\n"
+ " \"sql\": \"select * from test\",\n"
+ " \"udfs\": \"\",\n"
+ " \"sqlType\": \"1\",\n"
+ " \"title\": \"\",\n"
+ " \"receivers\": \"\",\n"
+ " \"receiversCc\": \"\",\n"
+ " \"showType\": \"TABLE\",\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"connParams\": \"\",\n"
+ " \"preStatements\": [\n"
+ " \n"
+ " ],\n"
+ " \"postStatements\": [\n"
+ " \n"
+ " ]\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"dependent\"\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"DEPENDENT\",\n"
+ " \"id\": \"tasks-33787\",\n"
+ " \"name\": \"dependent\",\n"
+ " \"params\": {\n"
+ " \n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \"relation\": \"AND\",\n"
+ " \"dependTaskList\": [\n"
+ " {\n"
+ " \"relation\": \"AND\",\n"
+ " \"dependItemList\": [\n"
+ " {\n"
+ " \"projectId\": 2,\n"
+ " \"definitionId\": 46,\n"
+ " \"depTasks\": \"ALL\",\n"
+ " \"cycle\": \"day\",\n"
+ " \"dateValue\": \"today\"\n"
+ " }\n"
+ " ]\n"
+ " }\n"
+ " ]\n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
Map<String, Object> updateResult = processDefinitionService.updateProcessDefinition(loginUser, projectName, 1, "test",
sqlDependentJson, "", "", "");
Assert.assertEquals(Status.UPDATE_PROCESS_DEFINITION_ERROR, updateResult.get(Constants.STATUS));
}
@Test
public void testBatchExportProcessDefinitionByIds() throws IOException {
processDefinitionService.batchExportProcessDefinitionByIds(
null, null, null, null);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
String projectName = "project_test1";
Project project = getProject(projectName);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
processDefinitionService.batchExportProcessDefinitionByIds(
loginUser, projectName, "1", null);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(1);
processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":"
+ "{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\",\"dependence\":{}"
+ ",\"description\":\"\",\"id\":\"tasks-3011\",\"maxRetryTimes\":\"0\",\"name\":\"tsssss\""
+ ",\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]}"
+ ",\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\""
+ ",\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":\"SHELL\""
+ ",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}");
Map<String, Object> checkResult = new HashMap<>();
checkResult.put(Constants.STATUS, Status.SUCCESS);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkResult);
Mockito.when(processDefineMapper.queryByDefineId(1)).thenReturn(processDefinition);
HttpServletResponse response = mock(HttpServletResponse.class);
ServletOutputStream outputStream = mock(ServletOutputStream.class);
when(response.getOutputStream()).thenReturn(outputStream);
processDefinitionService.batchExportProcessDefinitionByIds(
loginUser, projectName, "1", response);
}
@Test
public void testGetResourceIds() throws Exception {
// set up
Method testMethod = ReflectionUtils.findMethod(ProcessDefinitionServiceImpl.class, "getResourceIds", ProcessData.class);
assertThat(testMethod).isNotNull();
testMethod.setAccessible(true);
// when processData has empty task, then return empty string
ProcessData input1 = new ProcessData();
input1.setTasks(Collections.emptyList());
String output1 = (String) testMethod.invoke(processDefinitionService, input1);
assertThat(output1).isEmpty();
// when task is null, then return empty string
ProcessData input2 = new ProcessData();
input2.setTasks(null);
String output2 = (String) testMethod.invoke(processDefinitionService, input2);
assertThat(output2).isEmpty();
        // when task type has no matching mapping, then return empty string
ProcessData input3 = new ProcessData();
TaskNode taskNode3 = new TaskNode();
taskNode3.setType("notExistType");
input3.setTasks(Collections.singletonList(taskNode3));
String output3 = (String) testMethod.invoke(processDefinitionService, input3);
assertThat(output3).isEmpty();
// when task parameter list is null, then return empty string
ProcessData input4 = new ProcessData();
TaskNode taskNode4 = new TaskNode();
taskNode4.setType("SHELL");
taskNode4.setParams(null);
input4.setTasks(Collections.singletonList(taskNode4));
String output4 = (String) testMethod.invoke(processDefinitionService, input4);
assertThat(output4).isEmpty();
        // when resource id list is 1 2, then return 1,2
ProcessData input5 = new ProcessData();
TaskNode taskNode5 = new TaskNode();
taskNode5.setType("SHELL");
ShellParameters shellParameters5 = new ShellParameters();
ResourceInfo resourceInfo5A = new ResourceInfo();
resourceInfo5A.setId(1);
ResourceInfo resourceInfo5B = new ResourceInfo();
resourceInfo5B.setId(2);
shellParameters5.setResourceList(Arrays.asList(resourceInfo5A, resourceInfo5B));
taskNode5.setParams(JSONUtils.toJsonString(shellParameters5));
input5.setTasks(Collections.singletonList(taskNode5));
String output5 = (String) testMethod.invoke(processDefinitionService, input5);
assertThat(output5.split(",")).hasSize(2)
.containsExactlyInAnyOrder("1", "2");
        // when resource id list is 3 1 1 2, then return 3,1,2
ProcessData input6 = new ProcessData();
TaskNode taskNode6 = new TaskNode();
taskNode6.setType("SHELL");
ShellParameters shellParameters6 = new ShellParameters();
ResourceInfo resourceInfo6A = new ResourceInfo();
resourceInfo6A.setId(3);
ResourceInfo resourceInfo6B = new ResourceInfo();
resourceInfo6B.setId(1);
ResourceInfo resourceInfo6C = new ResourceInfo();
resourceInfo6C.setId(1);
ResourceInfo resourceInfo6D = new ResourceInfo();
resourceInfo6D.setId(2);
shellParameters6.setResourceList(Arrays.asList(resourceInfo6A, resourceInfo6B, resourceInfo6C, resourceInfo6D));
taskNode6.setParams(JSONUtils.toJsonString(shellParameters6));
input6.setTasks(Collections.singletonList(taskNode6));
String output6 = (String) testMethod.invoke(processDefinitionService, input6);
assertThat(output6.split(",")).hasSize(3)
.containsExactlyInAnyOrder("3", "1", "2");
}
/**
* get mock datasource
*
* @return DataSource
*/
private DataSource getDataSource() {
DataSource dataSource = new DataSource();
dataSource.setId(2);
dataSource.setName("test");
return dataSource;
}
/**
* get mock processDefinition
*
* @return ProcessDefinition
*/
private ProcessDefinition getProcessDefinition() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(46);
processDefinition.setName("test_pdf");
processDefinition.setProjectId(2);
processDefinition.setTenantId(1);
processDefinition.setDescription("");
return processDefinition;
}
/**
* get mock Project
*
* @param projectName projectName
* @return Project
*/
private Project getProject(String projectName) {
Project project = new Project();
project.setId(1);
project.setName(projectName);
project.setUserId(1);
return project;
}
/**
* get mock Project
*
* @param projectId projectId
* @return Project
*/
private Project getProjectById(int projectId) {
Project project = new Project();
project.setId(projectId);
project.setName("project_test2");
project.setUserId(1);
return project;
}
/**
* get mock schedule
*
* @return schedule
*/
private Schedule getSchedule() {
Date date = new Date();
Schedule schedule = new Schedule();
schedule.setId(46);
schedule.setProcessDefinitionId(1);
schedule.setStartTime(date);
schedule.setEndTime(date);
schedule.setCrontab("0 0 5 * * ? *");
schedule.setFailureStrategy(FailureStrategy.END);
schedule.setUserId(1);
schedule.setReleaseState(ReleaseState.OFFLINE);
schedule.setProcessInstancePriority(Priority.MEDIUM);
schedule.setWarningType(WarningType.NONE);
schedule.setWarningGroupId(1);
schedule.setWorkerGroup(Constants.DEFAULT_WORKER_GROUP);
return schedule;
}
/**
* get mock processMeta
*
* @return processMeta
*/
private ProcessMeta getProcessMeta() {
ProcessMeta processMeta = new ProcessMeta();
Schedule schedule = getSchedule();
processMeta.setScheduleCrontab(schedule.getCrontab());
processMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime()));
processMeta.setScheduleEndTime(DateUtils.dateToString(schedule.getEndTime()));
processMeta.setScheduleWarningType(String.valueOf(schedule.getWarningType()));
processMeta.setScheduleWarningGroupId(schedule.getWarningGroupId());
processMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy()));
processMeta.setScheduleReleaseState(String.valueOf(schedule.getReleaseState()));
processMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority()));
processMeta.setScheduleWorkerGroupName("workgroup1");
return processMeta;
}
private List<Schedule> getSchedulerList() {
List<Schedule> scheduleList = new ArrayList<>();
scheduleList.add(getSchedule());
return scheduleList;
}
private void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
result.put(Constants.STATUS, status);
if (statusParams != null && statusParams.length > 0) {
result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
} else {
result.put(Constants.MSG, status.getMsg());
}
}
@Test
public void testExportProcessMetaData() {
Integer processDefinitionId = 111;
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(processDefinitionId);
processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":"
+ "{\"failedNode\":[\"\"],\"successNode\":"
+ "[\"\"]},\"delayTime\":\"0\",\"dependence\":{},"
+ "\"description\":\"\",\"id\":\"tasks-3011\",\"maxRetryTimes\":\"0\",\"name\":\"tsssss\","
+ "\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]},"
+ "\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\","
+ "\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":\"SHELL\","
+ "\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}");
Assert.assertNotNull(processDefinitionService.exportProcessMetaData(processDefinitionId, processDefinition));
}
@Test
public void testImportProcessSchedule() {
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
Integer processDefinitionId = 111;
String processDefinitionName = "testProcessDefinition";
String projectName = "project_test1";
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT);
ProcessMeta processMeta = new ProcessMeta();
Assert.assertEquals(0, processDefinitionService.importProcessSchedule(loginUser, projectName, processMeta, processDefinitionName, processDefinitionId));
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
The Process Definition interface parameter `releaseState` should be changed to an enumeration type
**What are the current deficiencies and the benefits of improvement**
- The current Process Definition interface parameter `releaseState` uses an int type, so the int-to-state mapping relationship has to be maintained in both the front-end and back-end code
**Which version of DolphinScheduler:**
-[dev]
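
On the controller side, one possible way to bind the request parameter is a Spring `Converter`, so the front end can send the enum name instead of a raw 0/1 (a sketch under assumptions — the class below is illustrative, not necessarily how the actual fix is wired):

```java
import org.springframework.core.convert.converter.Converter;

import org.apache.dolphinscheduler.common.enums.ReleaseState;

/**
 * Hedged sketch: converts the releaseState request parameter from its String
 * form to the enum, so neither side has to track the 0/1 int codes. The class
 * name and its registration are assumptions for illustration only.
 */
public class StringToReleaseStateConverter implements Converter<String, ReleaseState> {

    @Override
    public ReleaseState convert(String source) {
        // accepts the enum name, e.g. "ONLINE" or "offline"
        return ReleaseState.valueOf(source.trim().toUpperCase());
    }
}
```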
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/udp/udp.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="udp-model">
<div class="scrollbar contpi-boxt">
<div>
<el-input
type="text"
size="small"
v-model="name"
:disabled="router.history.current.name === 'projects-instance-details'"
:placeholder="$t('Please enter name (required)')">
</el-input>
</div>
<template v-if="router.history.current.name !== 'projects-instance-details'">
<div style="padding-top: 12px;">
<el-input
type="textarea"
size="small"
v-model="description"
:autosize="{minRows:2}"
:placeholder="$t('Please enter description(optional)')"
autocomplete="off">
</el-input>
</div>
</template>
<div class="title" style="padding-top: 6px;">
<span class="text-b">{{$t('select tenant')}}</span>
<form-tenant v-model="tenantId"></form-tenant>
</div>
<div class="title" style="padding-top: 6px;">
<span class="text-b">{{$t('warning of timeout')}}</span>
<span style="padding-left: 6px;">
<el-switch v-model="checkedTimeout" size="small"></el-switch>
</span>
</div>
<div class="content" style="padding-bottom: 10px;" v-if="checkedTimeout">
<span>
<el-input v-model="timeout" style="width: 160px;" maxlength="9" size="small">
<span slot="append">{{$t('Minute')}}</span>
</el-input>
</span>
</div>
<div class="title" style="padding-top: 6px;">
<span>{{$t('Set global')}}</span>
</div>
<div class="content">
<div>
<m-local-params
ref="refLocalParams"
@on-local-params="_onLocalParams"
:udp-list="udpList"
:hide="false">
</m-local-params>
</div>
</div>
</div>
<div class="bottom">
<div class="submit">
<template v-if="router.history.current.name === 'projects-definition-details'">
<div class="lint-pt">
<el-checkbox v-model="releaseState" size="small" :false-label="0" :true-label="1">{{$t('Whether to go online the process definition')}}</el-checkbox>
</div>
</template>
<template v-if="router.history.current.name === 'projects-instance-details'">
<div class="lint-pt">
<el-checkbox v-model="syncDefine" size="small">{{$t('Whether to update the process definition')}}</el-checkbox>
</div>
</template>
<el-button type="text" size="small" @click="close()"> {{$t('Cancel')}} </el-button>
<el-button type="primary" size="small" round :disabled="isDetails" @click="ok()">{{$t('Add')}}</el-button>
</div>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import mLocalParams from '../formModel/tasks/_source/localParams'
import disabledState from '@/module/mixin/disabledState'
import Affirm from '../jumpAffirm'
import FormTenant from './_source/selectTenant'
export default {
name: 'udp',
data () {
return {
originalName: '',
// dag name
name: '',
// dag description
description: '',
// Global custom parameters
udpList: [],
      // Cached copy of the global custom parameters
udpListCache: [],
// Whether to go online the process definition
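      // (currently an int 0/1 flag mirrored on the backend; the issue above proposes an enum-typed parameter instead)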
releaseState: 1,
// Whether to update the process definition
syncDefine: true,
// Timeout alarm
timeout: 0,
tenantId: -1,
      // whether the timeout alarm option is checked
checkedTimeout: true
}
},
mixins: [disabledState],
props: {
},
methods: {
/**
* udp data
*/
_onLocalParams (a) {
this.udpList = a
},
_verifTimeout () {
const reg = /^[1-9]\d*$/
if (!reg.test(this.timeout)) {
this.$message.warning(`${i18n.$t('Please enter a positive integer greater than 0')}`)
return false
}
return true
},
_accuStore () {
this.store.commit('dag/setGlobalParams', _.cloneDeep(this.udpList))
this.store.commit('dag/setName', _.cloneDeep(this.name))
this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout))
this.store.commit('dag/setTenantId', _.cloneDeep(this.tenantId))
this.store.commit('dag/setDesc', _.cloneDeep(this.description))
this.store.commit('dag/setSyncDefine', this.syncDefine)
this.store.commit('dag/setReleaseState', this.releaseState)
},
/**
* submit
*/
ok () {
if (!this.name) {
this.$message.warning(`${i18n.$t('DAG graph name cannot be empty')}`)
return
}
let _verif = () => {
        // verify the user-defined params (udp)
if (!this.$refs.refLocalParams._verifProp()) {
return
}
        // verify the timeout value
if (this.checkedTimeout && !this._verifTimeout()) {
return
}
        // store the global params
this._accuStore()
Affirm.setIsPop(false)
this.$emit('onUdp')
}
if (this.originalName !== this.name) {
this.store.dispatch('dag/verifDAGName', this.name).then(res => {
_verif()
}).catch(e => {
this.$message.error(e.msg || '')
})
} else {
_verif()
}
},
/**
* Close the popup
*/
close () {
this.$emit('close')
}
},
watch: {
checkedTimeout (val) {
if (!val) {
this.timeout = 0
this.store.commit('dag/setTimeout', _.cloneDeep(this.timeout))
}
}
},
created () {
const dag = _.cloneDeep(this.store.state.dag)
this.udpList = dag.globalParams
this.udpListCache = dag.globalParams
this.name = dag.name
this.originalName = dag.name
this.description = dag.description
this.syncDefine = dag.syncDefine
this.releaseState = dag.releaseState
this.timeout = dag.timeout || 0
this.checkedTimeout = this.timeout !== 0
this.$nextTick(() => {
if (dag.tenantId === -1) {
this.tenantId = this.store.state.user.userInfo.tenantId
} else {
this.tenantId = dag.tenantId
}
})
},
mounted () {},
components: { FormTenant, mLocalParams }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.udp-model {
width: 624px;
min-height: 420px;
background: #fff;
border-radius: 3px;
padding:20px 0 ;
position: relative;
.contpi-boxt {
max-height: 600px;
overflow-y: scroll;
padding:0 20px;
}
.title {
line-height: 36px;
padding-bottom: 10px;
span {
font-size: 16px;
color: #333;
}
}
.bottom{
position: absolute;
bottom: 0;
left: 0;
width: 100%;
text-align: right;
height: 56px;
line-height: 56px;
border-top: 1px solid #DCDEDC;
background: #fff;
.submit {
padding-right: 20px;
margin-top: -4px;
}
.lint-pt {
position: absolute;
left: 20px;
top: -2px;
>label {
font-weight: normal;
}
}
}
.content {
padding-bottom: 50px;
.user-def-params-model {
.add {
a {
color: #0097e0;
}
}
}
}
}
</style>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
The Process Definition interface parameter releaseState should be changed to an enumeration type (a minimal sketch follows below)
**What are the current deficiencies and the benefits of improvement**
- The current Process Definition interface parameter releaseState variable uses an int type, and the releaseState mapping relationship needs to be recorded in both the front-end and back-end code
**Which version of DolphinScheduler:**
-[dev]
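A minimal sketch of the proposed enum (the code/description pairs and the `getEnum` helper name are illustrative, not the final API):

```java
public enum ReleaseState {

    OFFLINE(0, "process is offline"),
    ONLINE(1, "process is online");

    private final int code;
    private final String desc;

    ReleaseState(int code, String desc) {
        this.code = code;
        this.desc = desc;
    }

    public int getCode() {
        return code;
    }

    // resolve a legacy int value so existing callers can migrate gradually
    public static ReleaseState getEnum(int value) {
        for (ReleaseState state : values()) {
            if (state.code == value) {
                return state;
            }
        }
        return null;
    }
}
```

With an enum bound directly as the request parameter type, neither the front-end nor the back-end code has to hard-code the 0/1 mapping any more.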
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/definition/pages/list/_source/list.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="list-model" style="position: relative;">
<div class="table-box">
<el-table :data="list" size="mini" style="width: 100%" @selection-change="_arrDelChange">
<el-table-column type="selection" width="50" :selectable="selectable"></el-table-column>
<el-table-column type="index" :label="$t('#')" width="50"></el-table-column>
<el-table-column :label="$t('Process Name')" min-width="200">
<template slot-scope="scope">
<el-popover trigger="hover" placement="top">
<p>{{ scope.row.name }}</p>
<div slot="reference" class="name-wrapper">
<router-link :to="{ path: '/projects/definition/list/' + scope.row.id}" tag="a" class="links" :title="scope.row.name">
{{scope.row.name}}
</router-link>
</div>
</el-popover>
</template>
</el-table-column>
<el-table-column :label="$t('State')">
<template slot-scope="scope">
{{_rtPublishStatus(scope.row.releaseState)}}
</template>
</el-table-column>
<el-table-column :label="$t('Create Time')" width="135">
<template slot-scope="scope">
<span>{{scope.row.createTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Update Time')" width="135">
<template slot-scope="scope">
<span>{{scope.row.updateTime | formatDate}}</span>
</template>
</el-table-column>
<el-table-column :label="$t('Description')">
<template slot-scope="scope">
<span>{{scope.row.description | filterNull}}</span>
</template>
</el-table-column>
<el-table-column prop="modifyBy" :label="$t('Modify User')"></el-table-column>
<el-table-column :label="$t('Timing state')">
<template slot-scope="scope">
<span v-if="scope.row.scheduleReleaseState === 'OFFLINE'">{{$t('offline')}}</span>
<span v-if="scope.row.scheduleReleaseState === 'ONLINE'">{{$t('online')}}</span>
<span v-if="!scope.row.scheduleReleaseState">-</span>
</template>
</el-table-column>
<el-table-column :label="$t('Operation')" width="335" fixed="right">
<template slot-scope="scope">
<el-tooltip :content="$t('Edit')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-edit-outline" :disabled="scope.row.releaseState === 'ONLINE'" @click="_edit(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Start')" placement="top" :enterable="false">
<span><el-button type="success" size="mini" :disabled="scope.row.releaseState !== 'ONLINE'" icon="el-icon-video-play" @click="_start(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Timing')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-time" :disabled="scope.row.releaseState !== 'ONLINE' || scope.row.scheduleReleaseState !== null" @click="_timing(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('online')" placement="top" :enterable="false">
<span><el-button type="warning" size="mini" v-if="scope.row.releaseState === 'OFFLINE'" icon="el-icon-upload2" @click="_poponline(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('offline')" placement="top" :enterable="false">
<el-button type="danger" size="mini" icon="el-icon-download" v-if="scope.row.releaseState === 'ONLINE'" @click="_downline(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Copy Workflow')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" :disabled="scope.row.releaseState === 'ONLINE'" icon="el-icon-document-copy" @click="_copyProcess(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Cron Manage')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-date" :disabled="scope.row.releaseState !== 'ONLINE'" @click="_timingManage(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('delete')" placement="top" :enterable="false">
<el-button type="danger" size="mini" icon="el-icon-delete" circle></el-button>
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
icon="el-icon-info"
iconColor="red"
:title="$t('Delete?')"
@onConfirm="_delete(scope.row,scope.row.id)"
>
<el-button type="danger" size="mini" icon="el-icon-delete" :disabled="scope.row.releaseState === 'ONLINE'" circle slot="reference"></el-button>
</el-popconfirm>
</el-tooltip>
<el-tooltip :content="$t('TreeView')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-s-data" @click="_treeView(scope.row)" circle></el-button>
</el-tooltip>
<el-tooltip :content="$t('Export')" placement="top" :enterable="false">
<span><el-button type="primary" size="mini" icon="el-icon-s-unfold" @click="_export(scope.row)" circle></el-button></span>
</el-tooltip>
<el-tooltip :content="$t('Version Info')" placement="top" :enterable="false">
<el-button type="primary" size="mini" icon="el-icon-info" :disabled="scope.row.releaseState === 'ONLINE'" @click="_version(scope.row)" circle></el-button>
</el-tooltip>
</template>
</el-table-column>
</el-table>
</div>
<el-tooltip :content="$t('delete')" placement="top">
<el-popconfirm
:confirmButtonText="$t('Confirm')"
:cancelButtonText="$t('Cancel')"
:title="$t('Delete?')"
@onConfirm="_delete({},-1)"
>
<el-button style="position: absolute; bottom: -48px; left: 19px;" type="primary" size="mini" :disabled="!strSelectIds" slot="reference">{{$t('Delete')}}</el-button>
</el-popconfirm>
</el-tooltip>
<el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 80px;" @click="_batchExport(item)" >{{$t('Export')}}</el-button>
<span><el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 140px;" @click="_batchCopy(item)" >{{$t('Batch copy')}}</el-button></span>
<el-button type="primary" size="mini" :disabled="!strSelectIds" style="position: absolute; bottom: -48px; left: 225px;" @click="_batchMove(item)" >{{$t('Batch move')}}</el-button>
<el-drawer
:visible.sync="drawer"
size=""
:with-header="false">
<m-versions :versionData = versionData @mVersionSwitchProcessDefinitionVersion="mVersionSwitchProcessDefinitionVersion" @mVersionGetProcessDefinitionVersionsPage="mVersionGetProcessDefinitionVersionsPage" @mVersionDeleteProcessDefinitionVersion="mVersionDeleteProcessDefinitionVersion" @closeVersion="closeVersion"></m-versions>
</el-drawer>
<el-dialog
:title="$t('Please set the parameters before starting')"
:visible.sync="startDialog"
width="auto">
<m-start :startData= "startData" @onUpdateStart="onUpdateStart" @closeStart="closeStart"></m-start>
</el-dialog>
<el-dialog
:title="$t('Set parameters before timing')"
:visible.sync="timingDialog"
width="auto">
<m-timing :timingData="timingData" @onUpdateTiming="onUpdateTiming" @closeTiming="closeTiming"></m-timing>
</el-dialog>
<el-dialog
:title="$t('Info')"
:visible.sync="relatedItemsDialog"
width="auto">
<m-related-items :tmp="tmp" @onBatchCopy="onBatchCopy" @onBatchMove="onBatchMove" @closeRelatedItems="closeRelatedItems"></m-related-items>
</el-dialog>
</div>
</template>
<script>
import _ from 'lodash'
import mStart from './start'
import mTiming from './timing'
import mRelatedItems from './relatedItems'
import { mapActions } from 'vuex'
import { publishStatus } from '@/conf/home/pages/dag/_source/config'
import mVersions from './versions'
export default {
name: 'definition-list',
data () {
return {
list: [],
strSelectIds: '',
checkAll: false,
drawer: false,
versionData: {
processDefinition: {},
processDefinitionVersions: [],
total: null,
pageNo: null,
pageSize: null
},
startDialog: false,
startData: {},
timingDialog: false,
timingData: {
item: {},
type: ''
},
relatedItemsDialog: false,
tmp: false
}
},
props: {
processList: Array,
pageNo: Number,
pageSize: Number
},
methods: {
...mapActions('dag', ['editProcessState', 'getStartCheck', 'deleteDefinition', 'batchDeleteDefinition', 'exportDefinition', 'getProcessDefinitionVersionsPage', 'copyProcess', 'switchProcessDefinitionVersion', 'deleteProcessDefinitionVersion', 'moveProcess']),
...mapActions('security', ['getWorkerGroupsAll']),
    selectable (row, index) {
      return row.releaseState !== 'ONLINE'
    },
_rtPublishStatus (code) {
return _.filter(publishStatus, v => v.code === code)[0].desc
},
_treeView (item) {
this.$router.push({ path: `/projects/definition/tree/${item.id}` })
},
/**
* Start
*/
_start (item) {
this.getWorkerGroupsAll()
this.getStartCheck({ processDefinitionId: item.id }).then(res => {
this.startData = item
this.startDialog = true
}).catch(e => {
this.$message.error(e.msg || '')
})
},
onUpdateStart () {
this._onUpdate()
this.startDialog = false
},
closeStart () {
this.startDialog = false
},
/**
* timing
*/
_timing (item) {
this.timingData.item = item
this.timingData.type = 'timing'
this.timingDialog = true
},
onUpdateTiming () {
this._onUpdate()
this.timingDialog = false
},
closeTiming () {
this.timingDialog = false
},
/**
* Timing manage
*/
_timingManage (item) {
this.$router.push({ path: `/projects/definition/list/timing/${item.id}` })
},
/**
* delete
*/
_delete (item, i) {
      // i < 0 means batch delete of the currently selected rows
if (i < 0) {
this._batchDelete()
return
}
// remove one
this.deleteDefinition({
processDefinitionId: item.id
}).then(res => {
this._onUpdate()
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* edit
*/
_edit (item) {
this.$router.push({ path: `/projects/definition/list/${item.id}` })
},
/**
* Offline
*/
_downline (item) {
this._upProcessState({
processId: item.id,
releaseState: 0
})
},
/**
* online
*/
_poponline (item) {
this._upProcessState({
processId: item.id,
releaseState: 1
})
},
/**
* copy
*/
_copyProcess (item) {
this.copyProcess({
processDefinitionIds: item.id,
targetProjectId: item.projectId
}).then(res => {
this.strSelectIds = ''
this.$message.success(res.msg)
// $('body').find('.tooltip.fade.top.in').remove()
this._onUpdate()
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* move
*/
_moveProcess (item) {
this.moveProcess({
processDefinitionIds: item.id,
targetProjectId: item.projectId
}).then(res => {
this.strSelectIds = ''
this.$message.success(res.msg)
$('body').find('.tooltip.fade.top.in').remove()
this._onUpdate()
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_export (item) {
this.exportDefinition({
processDefinitionIds: item.id,
fileName: item.name
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* switch version in process definition version list
*
* @param version the version user want to change
* @param processDefinitionId the process definition id
* @param fromThis fromThis
*/
mVersionSwitchProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
this.switchProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
this.$message.success($t('Switch Version Successfully'))
this.$router.push({ path: `/projects/definition/list/${processDefinitionId}` })
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* Paging event of process definition versions
*
* @param pageNo page number
* @param pageSize page size
* @param processDefinitionId the process definition id of page version
* @param fromThis fromThis
*/
mVersionGetProcessDefinitionVersionsPage ({ pageNo, pageSize, processDefinitionId, fromThis }) {
this.getProcessDefinitionVersionsPage({
pageNo: pageNo,
pageSize: pageSize,
processDefinitionId: processDefinitionId
}).then(res => {
this.versionData.processDefinitionVersions = res.data.lists
this.versionData.total = res.data.totalCount
this.versionData.pageSize = res.data.pageSize
this.versionData.pageNo = res.data.currentPage
}).catch(e => {
this.$message.error(e.msg || '')
})
},
/**
* delete one version of process definition
*
* @param version the version need to delete
* @param processDefinitionId the process definition id user want to delete
* @param fromThis fromThis
*/
mVersionDeleteProcessDefinitionVersion ({ version, processDefinitionId, fromThis }) {
this.deleteProcessDefinitionVersion({
version: version,
processDefinitionId: processDefinitionId
}).then(res => {
this.$message.success(res.msg || '')
this.mVersionGetProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: processDefinitionId,
fromThis: fromThis
})
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_version (item) {
this.getProcessDefinitionVersionsPage({
pageNo: 1,
pageSize: 10,
processDefinitionId: item.id
}).then(res => {
let processDefinitionVersions = res.data.lists
let total = res.data.totalCount
let pageSize = res.data.pageSize
let pageNo = res.data.currentPage
this.versionData.processDefinition = item
this.versionData.processDefinitionVersions = processDefinitionVersions
this.versionData.total = total
this.versionData.pageNo = pageNo
this.versionData.pageSize = pageSize
this.drawer = true
}).catch(e => {
this.$message.error(e.msg || '')
})
},
closeVersion () {
this.drawer = false
},
_batchExport () {
this.exportDefinition({
processDefinitionIds: this.strSelectIds,
fileName: 'process_' + new Date().getTime()
}).then(res => {
this._onUpdate()
this.checkAll = false
this.strSelectIds = ''
}).catch(e => {
this.strSelectIds = ''
this.checkAll = false
this.$message.error(e.msg)
})
},
/**
* Batch Copy
*/
_batchCopy () {
this.relatedItemsDialog = true
this.tmp = false
},
onBatchCopy (item) {
this._copyProcess({ id: this.strSelectIds, projectId: item })
this.relatedItemsDialog = false
},
closeRelatedItems () {
this.relatedItemsDialog = false
},
/**
* _batchMove
*/
_batchMove () {
this.tmp = true
this.relatedItemsDialog = true
},
onBatchMove (item) {
this._moveProcess({ id: this.strSelectIds, projectId: item })
this.relatedItemsDialog = false
},
/**
* Edit state
*/
_upProcessState (o) {
this.editProcessState(o).then(res => {
this.$message.success(res.msg)
$('body').find('.tooltip.fade.top.in').remove()
this._onUpdate()
}).catch(e => {
this.$message.error(e.msg || '')
})
},
_onUpdate () {
this.$emit('on-update')
},
/**
* the array that to be delete
*/
_arrDelChange (v) {
let arr = []
arr = _.map(v, 'id')
this.strSelectIds = _.join(arr, ',')
},
/**
* batch delete
*/
_batchDelete () {
this.batchDeleteDefinition({
processDefinitionIds: this.strSelectIds
}).then(res => {
this._onUpdate()
this.checkAll = false
this.strSelectIds = ''
this.$message.success(res.msg)
}).catch(e => {
this.strSelectIds = ''
this.checkAll = false
this.$message.error(e.msg || '')
})
}
},
watch: {
processList: {
handler (a) {
this.checkAll = false
this.list = []
setTimeout(() => {
this.list = _.cloneDeep(a)
})
},
immediate: true,
deep: true
},
pageNo () {
this.strSelectIds = ''
}
},
created () {
},
mounted () {
},
components: { mVersions, mStart, mTiming, mRelatedItems }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,493 | [Improvement][api] The Process Definition interface parameter releaseState is changed to an enumeration type | **Describe the question**
The Process Definition interface parameter releaseState should be changed to an enumeration type
**What are the current deficiencies and the benefits of improvement**
- The current Process Definition interface parameter releaseState variable uses an int type, and the releaseState mapping relationship needs to be recorded in both the front-end and back-end code
**Which version of DolphinScheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4493 | https://github.com/apache/dolphinscheduler/pull/4494 | 046940b1a008c63d6cad44b3f7aff954f6f9af83 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | "2021-01-19T10:21:18Z" | java | "2021-01-22T14:39:58Z" | dolphinscheduler-ui/src/js/conf/home/store/dag/state.js | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import localStore from '@/module/util/localStorage'
// Get the name of the item currently clicked
const projectName = localStore.getItem('projectName')
export default {
// name
name: '',
// description
description: '',
// Node global parameter
globalParams: [],
// Node information
tasks: [],
// Node cache information, cache the previous input
cacheTasks: {},
// Timeout alarm
timeout: 0,
// tenant id
tenantId: -1,
// Node location information
locations: {},
// Node-to-node connection
connects: [],
// Running sign
runFlag: '',
// Whether to edit
isEditDag: false,
// Current project
projectName: projectName || '',
  // Release state of the process definition (1 = online)
releaseState: 1,
// Whether to update the process definition
syncDefine: true,
// tasks processList
processListS: [],
// projectList
projectListS: [],
// tasks resourcesList
resourcesListS: [],
// tasks resourcesListJar
resourcesListJar: [],
// tasks datasource Type
dsTypeListS: [
{
id: 0,
code: 'MYSQL',
disabled: false
},
{
id: 1,
code: 'POSTGRESQL',
disabled: false
},
{
id: 2,
code: 'HIVE',
disabled: false
},
{
id: 3,
code: 'SPARK',
disabled: false
},
{
id: 4,
code: 'CLICKHOUSE',
disabled: false
},
{
id: 5,
code: 'ORACLE',
disabled: false
},
{
id: 6,
code: 'SQLSERVER',
disabled: false
},
{
id: 7,
code: 'DB2',
disabled: false
},
{
id: 8,
code: 'PRESTO',
disabled: false
}
],
// Alarm interface
notifyGroupListS: [],
// Process instance list{ view a single record }
instanceListS: [],
// Operating state
isDetails: false,
startup: {
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,526 | [Bug][WorkerServer] worker server doesn't work properly in K8s | I run a worker server in K8s without changing the worker server params; sometimes I run into the error in the picture: dispatch cannot find a suitable worker for the task (a sketch of the resource gate involved follows below).
<img width="964" alt="截屏2021-01-20 下午8 44 42" src="https://user-images.githubusercontent.com/3957251/105436336-ff6cbc80-5c99-11eb-993c-ac13193efd96.png">
| https://github.com/apache/dolphinscheduler/issues/4526 | https://github.com/apache/dolphinscheduler/pull/4524 | cfa88a4a804b3bc565f6571d5a6e842df18644a2 | 5bae9336b025afb77bb0459fc10f333a8ff35d48 | "2021-01-22T02:10:46Z" | java | "2021-01-24T06:53:27Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/OSUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.shell.ShellExecutor;
import org.apache.commons.configuration.Configuration;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
import java.math.RoundingMode;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.StringTokenizer;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import oshi.hardware.CentralProcessor;
import oshi.hardware.GlobalMemory;
import oshi.hardware.HardwareAbstractionLayer;
/**
* os utils
*/
public class OSUtils {
private static final Logger logger = LoggerFactory.getLogger(OSUtils.class);
public static final ThreadLocal<Logger> taskLoggerThreadLocal = new ThreadLocal<>();
private static final SystemInfo SI = new SystemInfo();
public static final String TWO_DECIMAL = "0.00";
/**
* return -1 when the function can not get hardware env info
* e.g {@link OSUtils#loadAverage()} {@link OSUtils#cpuUsage()}
*/
public static final double NEGATIVE_ONE = -1;
private static HardwareAbstractionLayer hal = SI.getHardware();
private OSUtils() {
throw new UnsupportedOperationException("Construct OSUtils");
}
/**
* Initialization regularization, solve the problem of pre-compilation performance,
* avoid the thread safety problem of multi-thread operation
*/
private static final Pattern PATTERN = Pattern.compile("\\s+");
/**
* get memory usage
* Keep 2 decimal
*
* @return percent %
*/
public static double memoryUsage() {
GlobalMemory memory = hal.getMemory();
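        // used = total - available - swapUsed; multiplying by 0.1 before the
        // division promotes the long arithmetic to double (no integer division),
        // so the expression below is simply the used / total ratio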
double memoryUsage = (memory.getTotal() - memory.getAvailable() - memory.getSwapUsed()) * 0.1 / memory.getTotal() * 10;
DecimalFormat df = new DecimalFormat(TWO_DECIMAL);
df.setRoundingMode(RoundingMode.HALF_UP);
return Double.parseDouble(df.format(memoryUsage));
}
/**
* get available physical memory size
* <p>
* Keep 2 decimal
*
* @return available Physical Memory Size, unit: G
*/
public static double availablePhysicalMemorySize() {
GlobalMemory memory = hal.getMemory();
double availablePhysicalMemorySize = (memory.getAvailable() + memory.getSwapUsed()) / 1024.0 / 1024 / 1024;
DecimalFormat df = new DecimalFormat(TWO_DECIMAL);
df.setRoundingMode(RoundingMode.HALF_UP);
return Double.parseDouble(df.format(availablePhysicalMemorySize));
}
/**
* get total physical memory size
* <p>
* Keep 2 decimal
*
     * @return total physical memory size, unit: G
*/
public static double totalMemorySize() {
GlobalMemory memory = hal.getMemory();
        double totalPhysicalMemorySize = memory.getTotal() / 1024.0 / 1024 / 1024;
        DecimalFormat df = new DecimalFormat(TWO_DECIMAL);
        df.setRoundingMode(RoundingMode.HALF_UP);
        return Double.parseDouble(df.format(totalPhysicalMemorySize));
}
/**
* load average
*
* @return load average
*/
public static double loadAverage() {
double loadAverage = hal.getProcessor().getSystemLoadAverage();
if (Double.isNaN(loadAverage)) {
return NEGATIVE_ONE;
}
DecimalFormat df = new DecimalFormat(TWO_DECIMAL);
df.setRoundingMode(RoundingMode.HALF_UP);
return Double.parseDouble(df.format(loadAverage));
}
/**
* get cpu usage
*
* @return cpu usage
*/
public static double cpuUsage() {
CentralProcessor processor = hal.getProcessor();
double cpuUsage = processor.getSystemCpuLoad();
if (Double.isNaN(cpuUsage)) {
return NEGATIVE_ONE;
}
DecimalFormat df = new DecimalFormat(TWO_DECIMAL);
df.setRoundingMode(RoundingMode.HALF_UP);
return Double.parseDouble(df.format(cpuUsage));
}
public static List<String> getUserList() {
try {
if (isMacOS()) {
return getUserListFromMac();
} else if (isWindows()) {
return getUserListFromWindows();
} else {
return getUserListFromLinux();
}
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
return Collections.emptyList();
}
/**
* get user list from linux
*
* @return user list
*/
private static List<String> getUserListFromLinux() throws IOException {
List<String> userList = new ArrayList<>();
try (BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(new FileInputStream("/etc/passwd")))) {
String line;
while ((line = bufferedReader.readLine()) != null) {
if (line.contains(":")) {
String[] userInfo = line.split(":");
userList.add(userInfo[0]);
}
}
}
return userList;
}
/**
* get user list from mac
*
* @return user list
*/
private static List<String> getUserListFromMac() throws IOException {
String result = exeCmd("dscl . list /users");
if (StringUtils.isNotEmpty(result)) {
return Arrays.asList(result.split("\n"));
}
return Collections.emptyList();
}
/**
* get user list from windows
*
* @return user list
*/
private static List<String> getUserListFromWindows() throws IOException {
String result = exeCmd("net user");
String[] lines = result.split("\n");
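        // `net user` prints a banner followed by a dashed separator line;
        // the user names start on the line after that separator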
int startPos = 0;
int endPos = lines.length - 2;
for (int i = 0; i < lines.length; i++) {
if (lines[i].isEmpty()) {
continue;
}
int count = 0;
if (lines[i].charAt(0) == '-') {
                for (int j = 0; j < lines[i].length(); j++) {
                    if (lines[i].charAt(j) == '-') {
count++;
}
}
}
if (count == lines[i].length()) {
startPos = i + 1;
break;
}
}
List<String> users = new ArrayList<>();
while (startPos <= endPos) {
users.addAll(Arrays.asList(PATTERN.split(lines[startPos])));
startPos++;
}
return users;
}
/**
* create user
*
* @param userName user name
* @return true if creation was successful, otherwise false
*/
public static boolean createUser(String userName) {
try {
String userGroup = OSUtils.getGroup();
if (StringUtils.isEmpty(userGroup)) {
String errorLog = String.format("%s group does not exist for this operating system.", userGroup);
LoggerUtils.logError(Optional.ofNullable(logger), errorLog);
LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), errorLog);
return false;
}
if (isMacOS()) {
createMacUser(userName, userGroup);
} else if (isWindows()) {
createWindowsUser(userName, userGroup);
} else {
createLinuxUser(userName, userGroup);
}
return true;
} catch (Exception e) {
LoggerUtils.logError(Optional.ofNullable(logger), e);
LoggerUtils.logError(Optional.ofNullable(taskLoggerThreadLocal.get()), e);
}
return false;
}
/**
* create linux user
*
* @param userName user name
* @param userGroup user group
* @throws IOException in case of an I/O error
*/
private static void createLinuxUser(String userName, String userGroup) throws IOException {
String infoLog1 = String.format("create linux os user : %s", userName);
LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog1);
LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog1);
String cmd = String.format("sudo useradd -g %s %s", userGroup, userName);
String infoLog2 = String.format("execute cmd : %s", cmd);
LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog2);
LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog2);
OSUtils.exeCmd(cmd);
}
/**
* create mac user (Supports Mac OSX 10.10+)
*
* @param userName user name
* @param userGroup user group
* @throws IOException in case of an I/O error
*/
private static void createMacUser(String userName, String userGroup) throws IOException {
Optional<Logger> optionalLogger = Optional.ofNullable(logger);
Optional<Logger> optionalTaskLogger = Optional.ofNullable(taskLoggerThreadLocal.get());
String infoLog1 = String.format("create mac os user : %s", userName);
LoggerUtils.logInfo(optionalLogger, infoLog1);
LoggerUtils.logInfo(optionalTaskLogger, infoLog1);
String createUserCmd = String.format("sudo sysadminctl -addUser %s -password %s", userName, userName);
String infoLog2 = String.format("create user command : %s", createUserCmd);
LoggerUtils.logInfo(optionalLogger, infoLog2);
LoggerUtils.logInfo(optionalTaskLogger, infoLog2);
OSUtils.exeCmd(createUserCmd);
String appendGroupCmd = String.format("sudo dseditgroup -o edit -a %s -t user %s", userName, userGroup);
String infoLog3 = String.format("append user to group : %s", appendGroupCmd);
LoggerUtils.logInfo(optionalLogger, infoLog3);
LoggerUtils.logInfo(optionalTaskLogger, infoLog3);
OSUtils.exeCmd(appendGroupCmd);
}
/**
* create windows user
*
* @param userName user name
* @param userGroup user group
* @throws IOException in case of an I/O error
*/
private static void createWindowsUser(String userName, String userGroup) throws IOException {
String infoLog1 = String.format("create windows os user : %s", userName);
LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog1);
LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog1);
String userCreateCmd = String.format("net user \"%s\" /add", userName);
String infoLog2 = String.format("execute create user command : %s", userCreateCmd);
LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog2);
LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog2);
OSUtils.exeCmd(userCreateCmd);
String appendGroupCmd = String.format("net localgroup \"%s\" \"%s\" /add", userGroup, userName);
String infoLog3 = String.format("execute append user to group : %s", appendGroupCmd);
LoggerUtils.logInfo(Optional.ofNullable(logger), infoLog3);
LoggerUtils.logInfo(Optional.ofNullable(taskLoggerThreadLocal.get()), infoLog3);
OSUtils.exeCmd(appendGroupCmd);
}
/**
* get system group information
*
* @return system group info
* @throws IOException errors
*/
public static String getGroup() throws IOException {
if (isWindows()) {
String currentProcUserName = System.getProperty("user.name");
String result = exeCmd(String.format("net user \"%s\"", currentProcUserName));
String line = result.split("\n")[22];
String group = PATTERN.split(line)[1];
if (group.charAt(0) == '*') {
return group.substring(1);
} else {
return group;
}
} else {
String result = exeCmd("groups");
if (StringUtils.isNotEmpty(result)) {
String[] groupInfo = result.split(" ");
return groupInfo[0];
}
}
return null;
}
/**
* Execute the corresponding command of Linux or Windows
*
* @param command command
* @return result of execute command
* @throws IOException errors
*/
public static String exeCmd(String command) throws IOException {
StringTokenizer st = new StringTokenizer(command);
String[] cmdArray = new String[st.countTokens()];
for (int i = 0; st.hasMoreTokens(); i++) {
cmdArray[i] = st.nextToken();
}
return exeShell(cmdArray);
}
/**
* Execute the shell
*
* @param command command
* @return result of execute the shell
* @throws IOException errors
*/
public static String exeShell(String[] command) throws IOException {
return ShellExecutor.execCommand(command);
}
/**
* get process id
*
* @return process id
*/
public static int getProcessID() {
RuntimeMXBean runtimeMXBean = ManagementFactory.getRuntimeMXBean();
return Integer.parseInt(runtimeMXBean.getName().split("@")[0]);
}
/**
* whether is macOS
*
* @return true if mac
*/
public static boolean isMacOS() {
return getOSName().startsWith("Mac");
}
/**
* whether is windows
*
* @return true if windows
*/
public static boolean isWindows() {
return getOSName().startsWith("Windows");
}
/**
* get current OS name
*
* @return current OS name
*/
public static String getOSName() {
return System.getProperty("os.name");
}
/**
* check memory and cpu usage
*
* @param systemCpuLoad systemCpuLoad
* @param systemReservedMemory systemReservedMemory
* @return check memory and cpu usage
*/
public static Boolean checkResource(double systemCpuLoad, double systemReservedMemory) {
// system load average
double loadAverage = OSUtils.loadAverage();
// system available physical memory
double availablePhysicalMemorySize = OSUtils.availablePhysicalMemorySize();
if (loadAverage > systemCpuLoad || availablePhysicalMemorySize < systemReservedMemory) {
logger.warn("load is too high or availablePhysicalMemorySize(G) is too low, it's availablePhysicalMemorySize(G):{},loadAvg:{}", availablePhysicalMemorySize, loadAverage);
return false;
} else {
return true;
}
}
/**
* check memory and cpu usage
*
* @param conf conf
* @param isMaster is master
* @return check memory and cpu usage
*/
public static Boolean checkResource(Configuration conf, Boolean isMaster) {
double systemCpuLoad;
double systemReservedMemory;
if (Boolean.TRUE.equals(isMaster)) {
systemCpuLoad = conf.getDouble(Constants.MASTER_MAX_CPULOAD_AVG, Constants.DEFAULT_MASTER_CPU_LOAD);
systemReservedMemory = conf.getDouble(Constants.MASTER_RESERVED_MEMORY, Constants.DEFAULT_MASTER_RESERVED_MEMORY);
} else {
systemCpuLoad = conf.getDouble(Constants.WORKER_MAX_CPULOAD_AVG, Constants.DEFAULT_WORKER_CPU_LOAD);
systemReservedMemory = conf.getDouble(Constants.WORKER_RESERVED_MEMORY, Constants.DEFAULT_WORKER_RESERVED_MEMORY);
}
return checkResource(systemCpuLoad, systemReservedMemory);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,506 | There is a vulnerability in beanutils 1.7.0, upgrade recommended | https://github.com/apache/incubator-dolphinscheduler/blob/17c06ce966fc5c6a6136ee142e4698312fe6532f/pom.xml#L84
CVE-2014-0114 CVE-2019-10086
Recommended upgrade version: 1.8.0~beta-1 (an illustration of the introspection surface these CVEs target follows below)
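For context, a minimal illustration of that surface (the `User` bean is hypothetical): beanutils exposes the inherited `class` property like any other bean property, and pivoting from the `Class` object to its `ClassLoader` is what the later releases lock down.

```java
import org.apache.commons.beanutils.BeanMap;

public class BeanMapClassExposure {

    public static class User {
        private String name = "demo";

        public String getName() {
            return name;
        }
    }

    public static void main(String[] args) {
        BeanMap map = new BeanMap(new User());
        // "class" shows up because every bean inherits Object#getClass();
        // from that Class object an attacker can reach the ClassLoader.
        System.out.println(map.get("class"));
        System.out.println(map.get("name"));
    }
}
```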
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import org.apache.commons.beanutils.BeanMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Provides utility methods and decorators for {@link Collection} instances.
* <p>
* Various utility methods might put the input objects into a Set/Map/Bag. In case
* the input objects override {@link Object#equals(Object)}, it is mandatory that
* the general contract of the {@link Object#hashCode()} method is maintained.
* <p>
* NOTE: From 4.0, method parameters will take {@link Iterable} objects when possible.
*
* @version $Id: CollectionUtils.java 1686855 2015-06-22 13:00:27Z tn $
* @since 1.0
*/
public class CollectionUtils {
private CollectionUtils() {
throw new UnsupportedOperationException("Construct CollectionUtils");
}
/**
* Returns a new {@link Collection} containing <i>a</i> minus a subset of
* <i>b</i>. Only the elements of <i>b</i> that satisfy the predicate
* condition, <i>p</i> are subtracted from <i>a</i>.
*
* <p>The cardinality of each element <i>e</i> in the returned {@link Collection}
* that satisfies the predicate condition will be the cardinality of <i>e</i> in <i>a</i>
* minus the cardinality of <i>e</i> in <i>b</i>, or zero, whichever is greater.</p>
* <p>The cardinality of each element <i>e</i> in the returned {@link Collection} that does <b>not</b>
* satisfy the predicate condition will be equal to the cardinality of <i>e</i> in <i>a</i>.</p>
*
* @param a the collection to subtract from, must not be null
* @param b the collection to subtract, must not be null
* @param <T> T
* @return a new collection with the results
* @see Collection#removeAll
*/
public static <T> Collection<T> subtract(Set<T> a, Set<T> b) {
return org.apache.commons.collections4.CollectionUtils.subtract(a, b);
}
public static boolean isNotEmpty(Collection coll) {
return !isEmpty(coll);
}
public static boolean isEmpty(Collection coll) {
return coll == null || coll.isEmpty();
}
/**
* String to map
*
* @param str string
* @param separator separator
* @return string to map
*/
public static Map<String, String> stringToMap(String str, String separator) {
return stringToMap(str, separator, "");
}
/**
* String to map
*
* @param str string
* @param separator separator
* @param keyPrefix prefix
* @return string to map
*/
public static Map<String, String> stringToMap(String str, String separator, String keyPrefix) {
Map<String, String> emptyMap = new HashMap<>(0);
if (StringUtils.isEmpty(str)) {
return emptyMap;
}
if (StringUtils.isEmpty(separator)) {
return emptyMap;
}
String[] strings = str.split(separator);
Map<String, String> map = new HashMap<>(strings.length);
for (int i = 0; i < strings.length; i++) {
String[] strArray = strings[i].split("=");
if (strArray.length != 2) {
return emptyMap;
}
//strArray[0] KEY strArray[1] VALUE
if (StringUtils.isEmpty(keyPrefix)) {
map.put(strArray[0], strArray[1]);
} else {
map.put(keyPrefix + strArray[0], strArray[1]);
}
}
return map;
}
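    // Example (illustrative): stringToMap("k1=v1,k2=v2", ",", "p.") yields
    // {"p.k1" -> "v1", "p.k2" -> "v2"}; any pair that is not exactly "key=value"
    // makes the whole call return an empty map.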
/**
* Helper class to easily access cardinality properties of two collections.
*
* @param <O> the element type
*/
private static class CardinalityHelper<O> {
/**
* Contains the cardinality for each object in collection A.
*/
final Map<O, Integer> cardinalityA;
/**
* Contains the cardinality for each object in collection B.
*/
final Map<O, Integer> cardinalityB;
/**
* Create a new CardinalityHelper for two collections.
*
* @param a the first collection
* @param b the second collection
*/
public CardinalityHelper(final Iterable<? extends O> a, final Iterable<? extends O> b) {
cardinalityA = CollectionUtils.getCardinalityMap(a);
cardinalityB = CollectionUtils.getCardinalityMap(b);
}
/**
* Returns the frequency of this object in collection A.
*
* @param obj the object
* @return the frequency of the object in collection A
*/
public int freqA(final Object obj) {
return getFreq(obj, cardinalityA);
}
/**
* Returns the frequency of this object in collection B.
*
* @param obj the object
* @return the frequency of the object in collection B
*/
public int freqB(final Object obj) {
return getFreq(obj, cardinalityB);
}
private int getFreq(final Object obj, final Map<?, Integer> freqMap) {
final Integer count = freqMap.get(obj);
if (count != null) {
return count;
}
return 0;
}
}
/**
* returns {@code true} iff the given {@link Collection}s contain
* exactly the same elements with exactly the same cardinalities.
*
* @param a the first collection
* @param b the second collection
* @return Returns true iff the given Collections contain exactly the same elements with exactly the same cardinalities.
* That is, iff the cardinality of e in a is equal to the cardinality of e in b, for each element e in a or b.
*/
public static boolean equalLists(Collection<?> a, Collection<?> b) {
if (a == null && b == null) {
return true;
}
if (a == null || b == null) {
return false;
}
return isEqualCollection(a, b);
}
/**
* Returns {@code true} iff the given {@link Collection}s contain
* exactly the same elements with exactly the same cardinalities.
* <p>
* That is, iff the cardinality of <i>e</i> in <i>a</i> is
* equal to the cardinality of <i>e</i> in <i>b</i>,
* for each element <i>e</i> in <i>a</i> or <i>b</i>.
*
* @param a the first collection, must not be null
* @param b the second collection, must not be null
* @return <code>true</code> iff the collections contain the same elements with the same cardinalities.
*/
public static boolean isEqualCollection(final Collection<?> a, final Collection<?> b) {
if (a.size() != b.size()) {
return false;
}
final CardinalityHelper<Object> helper = new CardinalityHelper<>(a, b);
if (helper.cardinalityA.size() != helper.cardinalityB.size()) {
return false;
}
for (final Object obj : helper.cardinalityA.keySet()) {
if (helper.freqA(obj) != helper.freqB(obj)) {
return false;
}
}
return true;
}
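    // Example: isEqualCollection([a, b, b], [b, a, b]) is true, while
    // isEqualCollection([a, b], [a, b, b]) is false because the cardinalities differ.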
/**
* Returns a {@link Map} mapping each unique element in the given
* {@link Collection} to an {@link Integer} representing the number
* of occurrences of that element in the {@link Collection}.
* <p>
* Only those elements present in the collection will appear as
* keys in the map.
*
* @param <O> the type of object in the returned {@link Map}. This is a super type of O
* @param coll the collection to get the cardinality map for, must not be null
* @return the populated cardinality map
*/
public static <O> Map<O, Integer> getCardinalityMap(final Iterable<? extends O> coll) {
final Map<O, Integer> count = new HashMap<>();
for (final O obj : coll) {
count.put(obj, count.getOrDefault(obj, 0) + 1);
}
return count;
}
/**
* Removes certain attributes of each object in the list
*
* @param originList origin list
* @param exclusionSet exclusion set
* @param <T> T
* @return removes certain attributes of each object in the list
*/
public static <T extends Object> List<Map<String, Object>> getListByExclusion(List<T> originList, Set<String> exclusionSet) {
List<Map<String, Object>> instanceList = new ArrayList<>();
if (exclusionSet == null) {
exclusionSet = new HashSet<>();
}
if (originList == null) {
return instanceList;
}
Map<String, Object> instanceMap;
for (T instance : originList) {
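            // BeanMap comes from commons-beanutils (the dependency flagged in
            // this issue); it exposes the bean's getters as map entries via reflection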
Map<String, Object> dataMap = new BeanMap(instance);
instanceMap = new LinkedHashMap<>(16, 0.75f, true);
for (Map.Entry<String, Object> entry : dataMap.entrySet()) {
if (exclusionSet.contains(entry.getKey())) {
continue;
}
instanceMap.put(entry.getKey(), entry.getValue());
}
instanceList.add(instanceMap);
}
return instanceList;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,506 | There is a vulnerability in beanutils 1.7.0, upgrade recommended | https://github.com/apache/incubator-dolphinscheduler/blob/17c06ce966fc5c6a6136ee142e4698312fe6532f/pom.xml#L84
CVE-2014-0114 CVE-2019-10086
Recommended upgrade version: 1.8.0~beta-1
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================
Apache DolphinScheduler Subcomponents:
The Apache DolphinScheduler project contains subcomponents with separate copyright
notices and license terms. Your use of the source code for the these
subcomponents is subject to the terms and conditions of the following
licenses.
========================================================================
Apache 2.0 licenses
========================================================================
The following components are provided under the Apache License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
apacheds-i18n 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-i18n/2.0.0-M15, Apache 2.0
apacheds-kerberos-codec 2.0.0-M15: https://mvnrepository.com/artifact/org.apache.directory.server/apacheds-kerberos-codec/2.0.0-M15, Apache 2.0
apache-el 8.5.35.1: https://mvnrepository.com/artifact/org.mortbay.jasper/apache-el/8.5.35.1, Apache 2.0
api-asn1-api 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-asn1-api/1.0.0-M20, Apache 2.0
api-util 1.0.0-M20: https://mvnrepository.com/artifact/org.apache.directory.api/api-util/1.0.0-M20, Apache 2.0
audience-annotations 0.5.0: https://mvnrepository.com/artifact/org.apache.yetus/audience-annotations/0.5.0, Apache 2.0
avro 1.7.4: https://github.com/apache/avro, Apache 2.0
aws-sdk-java 1.7.4: https://mvnrepository.com/artifact/com.amazonaws/aws-java-sdk/1.7.4, Apache 2.0
bonecp 0.8.0.RELEASE: https://github.com/wwadge/bonecp, Apache 2.0
byte-buddy 1.9.10: https://mvnrepository.com/artifact/net.bytebuddy/byte-buddy/1.9.10, Apache 2.0
classmate 1.4.0: https://mvnrepository.com/artifact/com.fasterxml/classmate/1.4.0, Apache 2.0
clickhouse-jdbc 0.1.52: https://mvnrepository.com/artifact/ru.yandex.clickhouse/clickhouse-jdbc/0.1.52, Apache 2.0
commons-beanutils 1.7.0 https://mvnrepository.com/artifact/commons-beanutils/commons-beanutils/1.7.0, Apache 2.0
commons-cli 1.2: https://mvnrepository.com/artifact/commons-cli/commons-cli/1.2, Apache 2.0
commons-codec 1.11: https://mvnrepository.com/artifact/commons-codec/commons-codec/1.11, Apache 2.0
commons-collections 3.2.2: https://mvnrepository.com/artifact/commons-collections/commons-collections/3.2.2, Apache 2.0
commons-collections4 4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-collections4/4.1, Apache 2.0
commons-compress 1.4.1: https://mvnrepository.com/artifact/org.apache.commons/commons-compress/1.4.1, Apache 2.0
commons-configuration 1.10: https://mvnrepository.com/artifact/commons-configuration/commons-configuration/1.10, Apache 2.0
commons-daemon 1.0.13 https://mvnrepository.com/artifact/commons-daemon/commons-daemon/1.0.13, Apache 2.0
commons-dbcp 1.4: https://github.com/apache/commons-dbcp, Apache 2.0
commons-httpclient 3.0.1: https://mvnrepository.com/artifact/commons-httpclient/commons-httpclient/3.0.1, Apache 2.0
commons-io 2.4: https://github.com/apache/commons-io, Apache 2.0
commons-lang 2.6: https://github.com/apache/commons-lang, Apache 2.0
commons-logging 1.1.1: https://github.com/apache/commons-logging, Apache 2.0
commons-math3 3.1.1: https://github.com/apache/commons-math, Apache 2.0
commons-net 3.1: https://github.com/apache/commons-net, Apache 2.0
commons-pool 1.6: https://github.com/apache/commons-pool, Apache 2.0
cron-utils 5.0.5: https://mvnrepository.com/artifact/com.cronutils/cron-utils/5.0.5, Apache 2.0
curator-client 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-client/4.3.0, Apache 2.0
curator-framework 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-framework/4.3.0, Apache 2.0
curator-recipes 4.3.0: https://mvnrepository.com/artifact/org.apache.curator/curator-recipes/4.3.0, Apache 2.0
datanucleus-api-jdo 4.2.1: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-api-jdo/4.2.1, Apache 2.0
datanucleus-core 4.1.6: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-core/4.1.6, Apache 2.0
datanucleus-rdbms 4.1.7: https://mvnrepository.com/artifact/org.datanucleus/datanucleus-rdbms/4.1.7, Apache 2.0
derby 10.14.2.0: https://github.com/apache/derby, Apache 2.0
druid 1.1.14: https://mvnrepository.com/artifact/com.alibaba/druid/1.1.14, Apache 2.0
error_prone_annotations 2.1.3: https://mvnrepository.com/artifact/com.google.errorprone/error_prone_annotations/2.1.3, Apache 2.0
gson 2.8.5: https://github.com/google/gson, Apache 2.0
guava 24.1-jre: https://mvnrepository.com/artifact/com.google.guava/guava/24.1-jre, Apache 2.0
guice 3.0: https://mvnrepository.com/artifact/com.google.inject/guice/3.0, Apache 2.0
guice-servlet 3.0: https://mvnrepository.com/artifact/com.google.inject.extensions/guice-servlet/3.0, Apache 2.0
hadoop-annotations 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-annotations/2.7.3, Apache 2.0
hadoop-auth 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-auth/2.7.3, Apache 2.0
hadoop-aws 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-aws/2.7.3, Apache 2.0
hadoop-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-client/2.7.3, Apache 2.0
hadoop-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common/2.7.3, Apache 2.0
hadoop-hdfs 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs/2.7.3, Apache 2.0
hadoop-mapreduce-client-app 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-app/2.7.3, Apache 2.0
hadoop-mapreduce-client-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-common/2.7.3, Apache 2.0
hadoop-mapreduce-client-core 2.7.3: https://mvnrepository.com/artifact/io.hops/hadoop-mapreduce-client-core/2.7.3, Apache 2.0
hadoop-mapreduce-client-jobclient 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-jobclient/2.7.3, Apache 2.0
hadoop-mapreduce-client-shuffle 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-mapreduce-client-shuffle/2.7.3, Apache 2.0
hadoop-yarn-api 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-api/2.7.3, Apache 2.0
hadoop-yarn-client 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client/2.7.3, Apache 2.0
hadoop-yarn-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common/2.7.3, Apache 2.0
hadoop-yarn-server-common 2.7.3: https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-server-common/2.7.3, Apache 2.0
hibernate-validator 6.0.14.Final: https://github.com/hibernate/hibernate-validator, Apache 2.0
HikariCP 3.2.0: https://mvnrepository.com/artifact/com.zaxxer/HikariCP/3.2.0, Apache 2.0
hive-common 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-common/2.1.0, Apache 2.0
hive-jdbc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-jdbc/2.1.0, Apache 2.0
hive-metastore 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-metastore/2.1.0, Apache 2.0
hive-orc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-orc/2.1.0, Apache 2.0
hive-serde 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-serde/2.1.0, Apache 2.0
hive-service 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service/2.1.0, Apache 2.0
hive-service-rpc 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-service-rpc/2.1.0, Apache 2.0
hive-storage-api 2.1.0: https://mvnrepository.com/artifact/org.apache.hive/hive-storage-api/2.1.0, Apache 2.0
htrace-core 3.1.0-incubating: https://mvnrepository.com/artifact/org.apache.htrace/htrace-core/3.1.0-incubating, Apache 2.0
httpclient 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient/4.4.1, Apache 2.0
httpcore 4.4.1: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpcore/4.4.1, Apache 2.0
httpmime 4.5.7: https://mvnrepository.com/artifact/org.apache.httpcomponents/httpmime/4.5.7, Apache 2.0
jackson-annotations 2.10.5: https://mvnrepository.com/artifact/com.fasterxml.jackson.core/jackson-annotations/2.10.5, Apache 2.0
jackson-core 2.10.5: https://github.com/FasterXML/jackson-core, Apache 2.0
jackson-core-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-core-asl/1.9.13, Apache 2.0
jackson-databind 2.10.5: https://github.com/FasterXML/jackson-databind, Apache 2.0
jackson-datatype-jdk8 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jdk8/2.9.10, Apache 2.0
jackson-datatype-jsr310 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.datatype/jackson-datatype-jsr310/2.9.10, Apache 2.0
jackson-jaxrs 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-jaxrs/1.9.13, Apache 2.0 and LGPL 2.1
jackson-mapper-asl 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-mapper-asl/1.9.13, Apache 2.0
jackson-module-parameter-names 2.9.10: https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-parameter-names/2.9.10, Apache 2.0
jackson-xc 1.9.13: https://mvnrepository.com/artifact/org.codehaus.jackson/jackson-xc/1.9.13, Apache 2.0 and LGPL 2.1
javax.inject 1: https://mvnrepository.com/artifact/javax.inject/javax.inject/1, Apache 2.0
javax.jdo 3.2.0-m3: https://mvnrepository.com/artifact/org.datanucleus/javax.jdo/3.2.0-m3, Apache 2.0
java-xmlbuilder 0.4 : https://mvnrepository.com/artifact/com.jamesmurty.utils/java-xmlbuilder/0.4, Apache 2.0
jboss-logging 3.3.2.Final: https://mvnrepository.com/artifact/org.jboss.logging/jboss-logging/3.3.2.Final, Apache 2.0
jdo-api 3.0.1: https://mvnrepository.com/artifact/javax.jdo/jdo-api/3.0.1, Apache 2.0
jets3t 0.9.0: https://mvnrepository.com/artifact/net.java.dev.jets3t/jets3t/0.9.0, Apache 2.0
jettison 1.1: https://github.com/jettison-json/jettison, Apache 2.0
jetty 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty/6.1.26, Apache 2.0 and EPL 1.0
jetty-continuation 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-continuation/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-http 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-http/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-io 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-io/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-security 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-security/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-server 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-server/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-servlet 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlet/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-servlets 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-servlets/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-util 6.1.26: https://mvnrepository.com/artifact/org.mortbay.jetty/jetty-util/6.1.26, Apache 2.0 and EPL 1.0
jetty-util 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-util/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-webapp 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-webapp/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jetty-xml 9.4.14.v20181114: https://mvnrepository.com/artifact/org.eclipse.jetty/jetty-xml/9.4.14.v20181114, Apache 2.0 and EPL 1.0
jna 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna/4.5.2, Apache 2.0 and LGPL 2.1
jna-platform 4.5.2: https://mvnrepository.com/artifact/net.java.dev.jna/jna-platform/4.5.2, Apache 2.0 and LGPL 2.1
joda-time 2.10.1: https://github.com/JodaOrg/joda-time, Apache 2.0
jpam 1.1: https://mvnrepository.com/artifact/net.sf.jpam/jpam/1.1, Apache 2.0
jsqlparser 2.1: https://github.com/JSQLParser/JSqlParser, Apache 2.0 or LGPL 2.1
jsr305 3.0.0: https://mvnrepository.com/artifact/com.google.code.findbugs/jsr305, Apache 2.0
j2objc-annotations 1.1: https://mvnrepository.com/artifact/com.google.j2objc/j2objc-annotations/1.1, Apache 2.0
libfb303 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libfb303/0.9.3, Apache 2.0
libthrift 0.9.3: https://mvnrepository.com/artifact/org.apache.thrift/libthrift/0.9.3, Apache 2.0
log4j-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-api/2.11.2, Apache 2.0
log4j-core 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core/2.11.2, Apache 2.0
log4j 1.2.17: https://mvnrepository.com/artifact/log4j/log4j/1.2.17, Apache 2.0
log4j-1.2-api 2.11.2: https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-1.2-api/2.11.2, Apache 2.0
lz4 1.3.0: https://mvnrepository.com/artifact/net.jpountz.lz4/lz4/1.3.0, Apache 2.0
mapstruct 1.2.0.Final: https://github.com/mapstruct/mapstruct, Apache 2.0
mybatis 3.5.2: https://mvnrepository.com/artifact/org.mybatis/mybatis/3.5.2, Apache 2.0
mybatis-plus 3.2.0: https://github.com/baomidou/mybatis-plus, Apache 2.0
mybatis-plus-annotation 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-annotation/3.2.0, Apache 2.0
mybatis-plus-boot-starter 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-boot-starter/3.2.0, Apache 2.0
mybatis-plus-core 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-core/3.2.0, Apache 2.0
mybatis-plus-extension 3.2.0: https://mvnrepository.com/artifact/com.baomidou/mybatis-plus-extension/3.2.0, Apache 2.0
mybatis-spring 2.0.2: https://mvnrepository.com/artifact/org.mybatis/mybatis-spring/2.0.2, Apache 2.0
netty 3.6.2.Final: https://github.com/netty/netty, Apache 2.0
netty-all 4.1.33.Final: https://github.com/netty/netty/blob/netty-4.1.33.Final/LICENSE.txt, Apache 2.0
opencsv 2.3: https://mvnrepository.com/artifact/net.sf.opencsv/opencsv/2.3, Apache 2.0
parquet-hadoop-bundle 1.8.1: https://mvnrepository.com/artifact/org.apache.parquet/parquet-hadoop-bundle/1.8.1, Apache 2.0
poi 3.17: https://mvnrepository.com/artifact/org.apache.poi/poi/3.17, Apache 2.0
quartz 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz/2.3.0, Apache 2.0
quartz-jobs 2.3.0: https://mvnrepository.com/artifact/org.quartz-scheduler/quartz-jobs/2.3.0, Apache 2.0
snakeyaml 1.23: https://mvnrepository.com/artifact/org.yaml/snakeyaml/1.23, Apache 2.0
snappy 0.2: https://mvnrepository.com/artifact/org.iq80.snappy/snappy/0.2, Apache 2.0
snappy-java 1.0.4.1: https://github.com/xerial/snappy-java, Apache 2.0
spring-aop 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-aop/5.1.18.RELEASE, Apache 2.0
spring-beans 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-beans/5.1.18.RELEASE, Apache 2.0
spring-boot 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot/2.1.17.RELEASE, Apache 2.0
spring-boot-autoconfigure 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-autoconfigure/2.1.17.RELEASE, Apache 2.0
spring-boot-starter 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-aop 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-aop/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-jdbc 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-jetty 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jetty/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-json 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-json/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-logging 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-logging/2.1.17.RELEASE, Apache 2.0
spring-boot-starter-web 2.1.17.RELEASE: https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-web/2.1.17.RELEASE, Apache 2.0
spring-context 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-context/5.1.18.RELEASE, Apache 2.0
spring-core 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-core/5.1.18.RELEASE, Apache 2.0
spring-expression 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-expression/5.1.18.RELEASE, Apache 2.0
springfox-core 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-core, Apache 2.0
springfox-schema 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-schema, Apache 2.0
springfox-spi 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spi, Apache 2.0
springfox-spring-web 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-spring-web/2.9.2, Apache 2.0
springfox-swagger2 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger2/2.9.2, Apache 2.0
springfox-swagger-common 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-common/2.9.2, Apache 2.0
springfox-swagger-ui 2.9.2: https://mvnrepository.com/artifact/io.springfox/springfox-swagger-ui/2.9.2, Apache 2.0
spring-jcl 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jcl/5.1.18.RELEASE, Apache 2.0
spring-jdbc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-jdbc/5.1.18.RELEASE, Apache 2.0
spring-plugin-core 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-core/1.2.0.RELEASE, Apache 2.0
spring-plugin-metadata 1.2.0.RELEASE: https://mvnrepository.com/artifact/org.springframework.plugin/spring-plugin-metadata/1.2.0.RELEASE, Apache 2.0
spring-tx 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-tx/5.1.18.RELEASE, Apache 2.0
spring-web 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-web/5.1.18.RELEASE, Apache 2.0
spring-webmvc 5.1.18.RELEASE: https://mvnrepository.com/artifact/org.springframework/spring-webmvc/5.1.18.RELEASE, Apache 2.0
swagger-annotations 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-annotations/1.5.20, Apache 2.0
swagger-bootstrap-ui 1.9.3: https://mvnrepository.com/artifact/com.github.xiaoymin/swagger-bootstrap-ui/1.9.3, Apache 2.0
swagger-models 1.5.20: https://mvnrepository.com/artifact/io.swagger/swagger-models/1.5.20, Apache 2.0
tephra-api 0.6.0: https://mvnrepository.com/artifact/co.cask.tephra/tephra-api/0.6.0, Apache 2.0
validation-api 2.0.1.Final: https://mvnrepository.com/artifact/javax.validation/validation-api/2.0.1.Final, Apache 2.0
xercesImpl 2.9.1: https://mvnrepository.com/artifact/xerces/xercesImpl/2.9.1, Apache 2.0
xml-apis 1.4.01: https://mvnrepository.com/artifact/xml-apis/xml-apis/1.4.01, Apache 2.0 and W3C
zookeeper 3.4.14: https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper/3.4.14, Apache 2.0
presto-jdbc 0.238.1: https://mvnrepository.com/artifact/com.facebook.presto/presto-jdbc/0.238.1, Apache 2.0
========================================================================
BSD licenses
========================================================================
The following components are provided under a BSD license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
asm 3.1: https://github.com/jdf/javalin/tree/master/lib/asm-3.1, BSD
javolution 5.5.1: https://mvnrepository.com/artifact/javolution/javolution/5.5.1, BSD
jline 0.9.94: https://github.com/jline/jline3, BSD
jsch 0.1.42: https://mvnrepository.com/artifact/com.jcraft/jsch/0.1.42, BSD
leveldbjni-all 1.8: https://github.com/fusesource/leveldbjni, BSD-3-Clause
postgresql 42.1.4: https://mvnrepository.com/artifact/org.postgresql/postgresql/42.1.4, BSD 2-clause
protobuf-java 2.5.0: https://mvnrepository.com/artifact/com.google.protobuf/protobuf-java/2.5.0, BSD 2-clause
paranamer 2.3: https://mvnrepository.com/artifact/com.thoughtworks.paranamer/paranamer/2.3, BSD
threetenbp 1.3.6: https://mvnrepository.com/artifact/org.threeten/threetenbp/1.3.6, BSD 3-clause
xmlenc 0.52: https://mvnrepository.com/artifact/xmlenc/xmlenc/0.52, BSD
hamcrest-core 1.3: https://mvnrepository.com/artifact/org.hamcrest/hamcrest-core/1.3, BSD 2-Clause
========================================================================
CDDL licenses
========================================================================
The following components are provided under the CDDL License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
javax.activation-api 1.2.0: https://mvnrepository.com/artifact/javax.activation/javax.activation-api/1.2.0, CDDL and LGPL 2.0
javax.annotation-api 1.3.2: https://mvnrepository.com/artifact/javax.annotation/javax.annotation-api/1.3.2, CDDL + GPLv2
javax.servlet-api 3.1.0: https://mvnrepository.com/artifact/javax.servlet/javax.servlet-api/3.1.0, CDDL + GPLv2
jaxb-api 2.3.1: https://mvnrepository.com/artifact/javax.xml.bind/jaxb-api/2.3.1, CDDL 1.1
jaxb-impl 2.2.3-1: https://mvnrepository.com/artifact/com.sun.xml.bind/jaxb-impl/2.2.3-1, CDDL and GPL 1.1
jersey-client 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-client/1.9, CDDL 1.1 and GPL 1.1
jersey-core 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-core/1.9, CDDL 1.1 and GPL 1.1
jersey-guice 1.9: https://mvnrepository.com/artifact/com.sun.jersey.contribs/jersey-guice/1.9, CDDL 1.1 and GPL 1.1
jersey-json 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-json/1.9, CDDL 1.1 and GPL 1.1
jersey-server 1.9: https://mvnrepository.com/artifact/com.sun.jersey/jersey-server/1.9, CDDL 1.1 and GPL 1.1
jta 1.1: https://mvnrepository.com/artifact/javax.transaction/jta/1.1, CDDL 1.0
transaction-api 1.1: https://mvnrepository.com/artifact/javax.transaction/transaction-api/1.1, CDDL 1.0
========================================================================
EPL licenses
========================================================================
The following components are provided under the EPL License. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
aspectjweaver 1.9.2: https://mvnrepository.com/artifact/org.aspectj/aspectjweaver/1.9.2, EPL 1.0
logback-classic 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-classic/1.2.3, EPL 1.0 and LGPL 2.1
logback-core 1.2.3: https://mvnrepository.com/artifact/ch.qos.logback/logback-core/1.2.3, EPL 1.0 and LGPL 2.1
oshi-core 3.5.0: https://mvnrepository.com/artifact/com.github.oshi/oshi-core/3.5.0, EPL 1.0
junit 4.12: https://mvnrepository.com/artifact/junit/junit/4.12, EPL 1.0
h2 1.4.200: https://github.com/h2database/h2database/blob/master/LICENSE.txt, MPL 2.0 or EPL 1.0
========================================================================
MIT licenses
========================================================================
The following components are provided under the MIT license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
jul-to-slf4j 1.7.25: https://mvnrepository.com/artifact/org.slf4j/jul-to-slf4j/1.7.25, MIT
mssql-jdbc 6.1.0.jre8: https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc/6.1.0.jre8, MIT
slf4j-api 1.7.5: https://mvnrepository.com/artifact/org.slf4j/slf4j-api/1.7.5, MIT
animal-sniffer-annotations 1.14 https://mvnrepository.com/artifact/org.codehaus.mojo/animal-sniffer-annotations/1.14, MIT
checker-compat-qual 2.0.0 https://mvnrepository.com/artifact/org.checkerframework/checker-compat-qual/2.0.0, MIT + GPLv2
========================================================================
MPL 1.1 licenses
========================================================================
The following components are provided under a MPL 1.1 license. See project link for details.
The text of each license is also included at licenses/LICENSE-[project].txt.
jamon-runtime 2.3.1: https://mvnrepository.com/artifact/org.jamon/jamon-runtime/2.3.1, MPL-1.1
========================================================================
Public Domain licenses
========================================================================
xz 1.0: https://mvnrepository.com/artifact/org.tukaani/xz/1.0, Public Domain
aopalliance 1.0: https://mvnrepository.com/artifact/aopalliance/aopalliance/1.0, Public Domain
========================================================================
UI related licenses
========================================================================
The following components are used in the UI. See project link for details.
The text of each license is also included at licenses/ui-licenses/LICENSE-[project].txt.
========================================
MIT licenses
========================================
@form-create/element-ui 1.0.18: https://github.com/xaboy/form-create MIT
axios 0.16.2: https://github.com/axios/axios MIT
bootstrap 3.3.7: https://github.com/twbs/bootstrap MIT
canvg 1.5.1: https://github.com/canvg/canvg MIT
clipboard 2.0.1: https://github.com/zenorocha/clipboard.js MIT
codemirror 5.43.0: https://github.com/codemirror/CodeMirror MIT
dayjs 1.7.8: https://github.com/iamkun/dayjs MIT
element-ui 2.13.2: https://github.com/ElemeFE/element MIT
html2canvas 0.5.0-beta4: https://github.com/niklasvh/html2canvas MIT
jquery 3.3.1: https://github.com/jquery/jquery MIT
jquery-ui 1.12.1: https://github.com/jquery/jquery-ui MIT
js-cookie 2.2.1: https://github.com/js-cookie/js-cookie MIT
jsplumb 2.8.6: https://github.com/jsplumb/jsplumb MIT and GPLv2
lodash 4.17.11: https://github.com/lodash/lodash MIT
vue-treeselect 0.4.0: https://github.com/riophae/vue-treeselect MIT
vue 2.5.17: https://github.com/vuejs/vue MIT
vue-router 2.7.0: https://github.com/vuejs/vue-router MIT
vuex 3.0.0: https://github.com/vuejs/vuex MIT
vuex-router-sync 4.1.2: https://github.com/vuejs/vuex-router-sync MIT
dagre 0.8.5: https://github.com/dagrejs/dagre MIT
========================================
Apache 2.0 licenses
========================================
echarts 4.1.0: https://github.com/apache/incubator-echarts Apache-2.0
remixicon 2.5.0: https://github.com/Remix-Design/remixicon Apache-2.0
========================================
BSD licenses
========================================
d3 3.5.17: https://github.com/d3/d3 BSD-3-Clause |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,506 | There is a vulnerability in beanutils 1.7.0, upgrade recommended | https://github.com/apache/incubator-dolphinscheduler/blob/17c06ce966fc5c6a6136ee142e4698312fe6532f/pom.xml#L84
CVE-2014-0114 CVE-2019-10086
Recommended upgrade version: 1.8.0~beta-1 | https://github.com/apache/dolphinscheduler/issues/4506 | https://github.com/apache/dolphinscheduler/pull/4525 | 829fdb52c1769030113b450b0b98ffecfe3693f8 | a5f31b75abee3ef12647dbb961013bd941bc78f1 | "2021-01-20T05:46:36Z" | java | "2021-01-25T05:43:06Z" | pom.xml | <?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler</artifactId>
<version>1.3.4-SNAPSHOT</version>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<url>http://dolphinscheduler.apache.org</url>
<description>Dolphin Scheduler is a distributed, easily extensible visual DAG workflow scheduling system, dedicated
to solving the complex dependencies in data processing and providing an out-of-the-box scheduling system for data
processing.
</description>
<licenses>
<license>
<name>Apache License 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</connection>
<developerConnection>scm:git:https://github.com/apache/incubator-dolphinscheduler.git</developerConnection>
<url>https://github.com/apache/incubator-dolphinscheduler</url>
<tag>HEAD</tag>
</scm>
<mailingLists>
<mailingList>
<name>DolphinScheduler Developer List</name>
<post>dev@dolphinscheduler.incubator.apache.org</post>
<subscribe>dev-subscribe@dolphinscheduler.incubator.apache.org</subscribe>
<unsubscribe>dev-unsubscribe@dolphinscheduler.incubator.apache.org</unsubscribe>
</mailingList>
</mailingLists>
<parent>
<groupId>org.apache</groupId>
<artifactId>apache</artifactId>
<version>21</version>
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<curator.version>4.3.0</curator.version>
<spring.version>5.1.18.RELEASE</spring.version>
<spring.boot.version>2.1.17.RELEASE</spring.boot.version>
<java.version>1.8</java.version>
<logback.version>1.2.3</logback.version>
<hadoop.version>2.7.3</hadoop.version>
<quartz.version>2.3.0</quartz.version>
<jackson.version>2.10.5</jackson.version>
<mybatis-plus.version>3.2.0</mybatis-plus.version>
<mybatis.spring.version>2.0.1</mybatis.spring.version>
<cron.utils.version>5.0.5</cron.utils.version>
<druid.version>1.1.22</druid.version>
<h2.version>1.4.200</h2.version>
<commons.codec.version>1.11</commons.codec.version>
<commons.logging.version>1.1.1</commons.logging.version>
<httpclient.version>4.4.1</httpclient.version>
<httpcore.version>4.4.1</httpcore.version>
<junit.version>4.12</junit.version>
<mysql.connector.version>5.1.34</mysql.connector.version>
<slf4j.api.version>1.7.5</slf4j.api.version>
<slf4j.log4j12.version>1.7.5</slf4j.log4j12.version>
<commons.collections.version>3.2.2</commons.collections.version>
<commons.httpclient>3.0.1</commons.httpclient>
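<!-- Note (issue #4506): commons-beanutils 1.7.0 is affected by CVE-2014-0114 and
CVE-2019-10086; raising this property is the suggested remediation. The target
version is an assumption here (e.g. 1.9.4, the release generally cited as fixing
both CVEs; the issue itself suggests 1.8.0~beta-1); see PR #4525 for the value
actually adopted. -->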
<commons.beanutils.version>1.7.0</commons.beanutils.version>
<commons.configuration.version>1.10</commons.configuration.version>
<commons.email.version>1.5</commons.email.version>
<poi.version>3.17</poi.version>
<javax.servlet.api.version>3.1.0</javax.servlet.api.version>
<commons.collections4.version>4.1</commons.collections4.version>
<guava.version>24.1-jre</guava.version>
<postgresql.version>42.1.4</postgresql.version>
<hive.jdbc.version>2.1.0</hive.jdbc.version>
<commons.io.version>2.4</commons.io.version>
<oshi.core.version>3.5.0</oshi.core.version>
<clickhouse.jdbc.version>0.1.52</clickhouse.jdbc.version>
<mssql.jdbc.version>6.1.0.jre8</mssql.jdbc.version>
<presto.jdbc.version>0.238.1</presto.jdbc.version>
<spotbugs.version>3.1.12</spotbugs.version>
<checkstyle.version>3.0.0</checkstyle.version>
<zookeeper.version>3.4.14</zookeeper.version>
<frontend-maven-plugin.version>1.6</frontend-maven-plugin.version>
<maven-compiler-plugin.version>3.3</maven-compiler-plugin.version>
<maven-assembly-plugin.version>3.1.0</maven-assembly-plugin.version>
<maven-release-plugin.version>2.5.3</maven-release-plugin.version>
<maven-javadoc-plugin.version>2.10.3</maven-javadoc-plugin.version>
<maven-source-plugin.version>2.4</maven-source-plugin.version>
<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
<maven-dependency-plugin.version>3.1.1</maven-dependency-plugin.version>
<rpm-maven-plugin.version>2.2.0</rpm-maven-plugin.version>
<jacoco.version>0.8.4</jacoco.version>
<jcip.version>1.0</jcip.version>
<maven.deploy.skip>false</maven.deploy.skip>
<cobertura-maven-plugin.version>2.7</cobertura-maven-plugin.version>
<mockito.version>2.21.0</mockito.version>
<powermock.version>2.0.2</powermock.version>
<servlet-api.version>2.5</servlet-api.version>
<swagger.version>1.9.3</swagger.version>
<springfox.version>2.9.2</springfox.version>
<swagger-models.version>1.5.24</swagger-models.version>
<guava-retry.version>2.0.0</guava-retry.version>
<dep.airlift.version>0.184</dep.airlift.version>
<dep.packaging.version>${dep.airlift.version}</dep.packaging.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus</artifactId>
<version>${mybatis-plus.version}</version>
</dependency>
<!-- quartz-->
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz-jobs</artifactId>
<version>${quartz.version}</version>
</dependency>
<dependency>
<groupId>com.cronutils</groupId>
<artifactId>cron-utils</artifactId>
<version>${cron.utils.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>${druid.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>${spring.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${spring.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-server</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert-plugin</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-dao</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-api</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-remote</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-service</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-alert</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-spi</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-framework</artifactId>
<version>${curator.version}</version>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-recipes</artifactId>
<version>${curator.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
<exclusion>
<artifactId>netty</artifactId>
<groupId>io.netty</groupId>
</exclusion>
<exclusion>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-annotations</artifactId>
</exclusion>
</exclusions>
<version>${zookeeper.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${commons.codec.version}</version>
</dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
<version>${commons.logging.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>${httpcore.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>${mockito.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
<version>${powermock.version}</version>
<type>jar</type>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.connector.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>${h2.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>${slf4j.api.version}</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>${slf4j.log4j12.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<version>${commons.collections.version}</version>
</dependency>
<dependency>
<groupId>commons-httpclient</groupId>
<artifactId>commons-httpclient</artifactId>
<version>${commons.httpclient}</version>
</dependency>
<dependency>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>${commons.beanutils.version}</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>${commons.configuration.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-core</artifactId>
<version>${logback.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-email</artifactId>
<version>${commons.email.version}</version>
</dependency>
<!--excel poi-->
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi</artifactId>
<version>${poi.version}</version>
</dependency>
<!-- hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>${hadoop.version}</version>
<exclusions>
<exclusion>
<artifactId>slf4j-log4j12</artifactId>
<groupId>org.slf4j</groupId>
</exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-aws</artifactId>
<version>${hadoop.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${commons.collections4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${postgresql.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.jdbc.version}</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>${commons.io.version}</version>
</dependency>
<dependency>
<groupId>com.github.oshi</groupId>
<artifactId>oshi-core</artifactId>
<version>${oshi.core.version}</version>
</dependency>
<dependency>
<groupId>ru.yandex.clickhouse</groupId>
<artifactId>clickhouse-jdbc</artifactId>
<version>${clickhouse.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>${mssql.jdbc.version}</version>
</dependency>
<dependency>
<groupId>com.facebook.presto</groupId>
<artifactId>presto-jdbc</artifactId>
<version>${presto.jdbc.version}</version>
</dependency>
<dependency>
<groupId>net.jcip</groupId>
<artifactId>jcip-annotations</artifactId>
<version>${jcip.version}</version>
<optional>true</optional>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>${javax.servlet.api.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox.version}</version>
</dependency>
<dependency>
<groupId>io.swagger</groupId>
<artifactId>swagger-models</artifactId>
<version>${swagger-models.version}</version>
</dependency>
<dependency>
<groupId>com.github.xiaoymin</groupId>
<artifactId>swagger-bootstrap-ui</artifactId>
<version>${swagger.version}</version>
</dependency>
<dependency>
<groupId>com.github.rholder</groupId>
<artifactId>guava-retrying</artifactId>
<version>${guava-retry.version}</version>
</dependency>
<dependency>
<groupId>org.sonatype.aether</groupId>
<artifactId>aether-api</artifactId>
<version>1.13.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>io.airlift.resolver</groupId>
<artifactId>resolver</artifactId>
<version>1.5</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.ow2.asm</groupId>
<artifactId>asm</artifactId>
<version>6.2.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>com.sun.mail</groupId>
<artifactId>javax.mail</artifactId>
<version>1.6.2</version>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<finalName>apache-dolphinscheduler-incubating-${project.version}</finalName>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-maven-plugin</artifactId>
<version>1.0.0</version>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>ca.vanzyl.maven.plugins</groupId>
<artifactId>provisio-maven-plugin</artifactId>
<version>1.0.4</version>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>rpm-maven-plugin</artifactId>
<version>${rpm-maven-plugin.version}</version>
<inherited>false</inherited>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
</configuration>
<version>${maven-compiler-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<tagNameFormat>@{project.version}</tagNameFormat>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
<version>${maven-assembly-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<configuration>
<source>8</source>
<failOnError>false</failOnError>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>${maven-source-plugin.version}</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<version>${maven-dependency-plugin.version}</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.dolphinscheduler</groupId>
<artifactId>dolphinscheduler-maven-plugin</artifactId>
<extensions>true</extensions>
<!--<configuration>-->
<!--<allowedProvidedDependencies>-->
<!--<allowedProvidedDependency>org.apache.dolphinscheduler:dolphinscheduler-common</allowedProvidedDependency>-->
<!--</allowedProvidedDependencies>-->
<!--</configuration>-->
</plugin>
<plugin>
<groupId>ca.vanzyl.maven.plugins</groupId>
<artifactId>provisio-maven-plugin</artifactId>
<extensions>true</extensions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<executions>
<execution>
<id>attach-sources</id>
<phase>verify</phase>
<goals>
<goal>jar-no-fork</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-javadoc-plugin</artifactId>
<version>${maven-javadoc-plugin.version}</version>
<executions>
<execution>
<id>attach-javadocs</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
<configuration>
<aggregate>true</aggregate>
<charset>${project.build.sourceEncoding}</charset>
<encoding>${project.build.sourceEncoding}</encoding>
<docencoding>${project.build.sourceEncoding}</docencoding>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
<version>${maven-release-plugin.version}</version>
<configuration>
<autoVersionSubmodules>true</autoVersionSubmodules>
<tagNameFormat>@{project.version}</tagNameFormat>
<tagBase>${project.version}</tagBase>
<!--<goals>-f pom.xml deploy</goals>-->
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.maven.scm</groupId>
<artifactId>maven-scm-provider-jgit</artifactId>
<version>1.9.5</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${maven-compiler-plugin.version}</version>
<configuration>
<source>${java.version}</source>
<target>${java.version}</target>
<encoding>${project.build.sourceEncoding}</encoding>
<skip>false</skip><!--not skip compile test classes-->
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>${maven-surefire-plugin.version}</version>
<configuration>
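<!-- Surefire runs only the test classes explicitly whitelisted in the
<includes> element below; a new test class must be added to this list or it
will be silently skipped by the build. -->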
<includes>
<include>**/api/controller/ProcessDefinitionControllerTest.java</include>
<include>**/api/controller/TenantControllerTest.java</include>
<include>**/api/dto/resources/filter/ResourceFilterTest.java</include>
<include>**/api/dto/resources/visitor/ResourceTreeVisitorTest.java</include>
<include>**/api/enums/testGetEnum.java</include>
<include>**/api/enums/StatusTest.java</include>
<include>**/api/exceptions/ApiExceptionHandlerTest.java</include>
<include>**/api/exceptions/ServiceExceptionTest.java</include>
<include>**/api/interceptor/LocaleChangeInterceptorTest.java</include>
<include>**/api/interceptor/LoginHandlerInterceptorTest.java</include>
<include>**/api/security/impl/pwd/PasswordAuthenticatorTest.java</include>
<include>**/api/security/impl/ldap/LdapAuthenticatorTest.java</include>
<include>**/api/security/SecurityConfigLDAPTest.java</include>
<include>**/api/security/SecurityConfigPasswordTest.java</include>
<include>**/api/service/AccessTokenServiceTest.java</include>
<include>**/api/service/AlertGroupServiceTest.java</include>
<include>**/api/service/BaseDAGServiceTest.java</include>
<include>**/api/service/BaseServiceTest.java</include>
<include>**/api/service/DataAnalysisServiceTest.java</include>
<include>**/api/service/DataSourceServiceTest.java</include>
<include>**/api/service/ExecutorService2Test.java</include>
<include>**/api/service/ExecutorServiceTest.java</include>
<include>**/api/service/LoggerServiceTest.java</include>
<include>**/api/service/MonitorServiceTest.java</include>
<include>**/api/service/ProcessDefinitionServiceTest.java</include>
<include>**/api/service/ProcessDefinitionVersionServiceTest.java</include>
<include>**/api/service/ProcessInstanceServiceTest.java</include>
<include>**/api/service/ProjectServiceTest.java</include>
<include>**/api/service/QueueServiceTest.java</include>
<include>**/api/service/ResourcesServiceTest.java</include>
<include>**/api/service/SchedulerServiceTest.java</include>
<include>**/api/service/SessionServiceTest.java</include>
<include>**/api/service/TaskInstanceServiceTest.java</include>
<include>**/api/service/TenantServiceTest.java</include>
<include>**/api/service/UdfFuncServiceTest.java</include>
<include>**/api/service/UiPluginServiceTest.java</include>
<include>**/api/service/UserAlertGroupServiceTest.java</include>
<include>**/api/service/UsersServiceTest.java</include>
<include>**/api/service/WorkerGroupServiceTest.java</include>
<include>**/api/service/WorkFlowLineageServiceTest.java</include>
<include>**/api/controller/TaskInstanceControllerTest.java</include>
<include>**/api/controller/WorkFlowLineageControllerTest.java</include>
<include>**/api/utils/exportprocess/DataSourceParamTest.java</include>
<include>**/api/utils/exportprocess/DependentParamTest.java</include>
<include>**/api/utils/CheckUtilsTest.java</include>
<include>**/api/utils/FileUtilsTest.java</include>
<include>**/api/utils/ResultTest.java</include>
<include>**/common/graph/DAGTest.java</include>
<include>**/common/os/OshiTest.java</include>
<include>**/common/os/OSUtilsTest.java</include>
<include>**/common/shell/ShellExecutorTest.java</include>
<include>**/common/task/DataxParametersTest.java</include>
<include>**/common/task/EntityTestUtils.java</include>
<include>**/common/task/FlinkParametersTest.java</include>
<include>**/common/task/HttpParametersTest.java</include>
<include>**/common/task/SqoopParameterEntityTest.java</include>
<include>**/common/threadutils/ThreadPoolExecutorsTest.java</include>
<include>**/common/threadutils/ThreadUtilsTest.java</include>
<include>**/common/utils/CollectionUtilsTest.java</include>
<include>**/common/utils/CommonUtilsTest.java</include>
<include>**/common/utils/DateUtilsTest.java</include>
<include>**/common/utils/DependentUtilsTest.java</include>
<include>**/common/utils/EncryptionUtilsTest.java</include>
<include>**/common/utils/FileUtilsTest.java</include>
<include>**/common/utils/IpUtilsTest.java</include>
<include>**/common/utils/JSONUtilsTest.java</include>
<include>**/common/utils/LoggerUtilsTest.java</include>
<include>**/common/utils/OSUtilsTest.java</include>
<include>**/common/utils/ParameterUtilsTest.java</include>
<include>**/common/utils/TimePlaceholderUtilsTest.java</include>
<include>**/common/utils/PreconditionsTest.java</include>
<include>**/common/utils/PropertyUtilsTest.java</include>
<include>**/common/utils/SchemaUtilsTest.java</include>
<include>**/common/utils/ScriptRunnerTest.java</include>
<include>**/common/utils/SensitiveLogUtilsTest.java</include>
<include>**/common/utils/StringTest.java</include>
<include>**/common/utils/StringUtilsTest.java</include>
<include>**/common/utils/TaskParametersUtilsTest.java</include>
<include>**/common/utils/VarPoolUtilsTest.java</include>
<include>**/common/utils/HadoopUtilsTest.java</include>
<include>**/common/utils/HttpUtilsTest.java</include>
<include>**/common/utils/KerberosHttpClientTest.java</include>
<include>**/common/utils/HiveConfUtilsTest.java</include>
<include>**/common/ConstantsTest.java</include>
<include>**/common/utils/HadoopUtils.java</include>
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/common/enums/ExecutionStatusTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/dao/entity/TaskInstanceTest.java</include>
<include>**/dao/entity/UdfFuncTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
<include>**/remote/RemoveTaskLogRequestCommandTest.java</include>
<include>**/remote/NettyRemotingClientTest.java</include>
<include>**/remote/NettyUtilTest.java</include>
<include>**/remote/ResponseFutureTest.java</include>
<include>**/remote/command/alert/AlertSendRequestCommandTest.java</include>
<include>**/remote/command/alert/AlertSendResponseCommandTest.java</include>
<include>**/server/log/LoggerServerTest.java</include>
<include>**/server/entity/SQLTaskExecutionContextTest.java</include>
<include>**/server/log/MasterLogFilterTest.java</include>
<include>**/server/log/SensitiveDataConverterTest.java</include>
<!--<include>**/server/log/TaskLogDiscriminatorTest.java</include>-->
<include>**/server/log/TaskLogFilterTest.java</include>
<include>**/server/log/WorkerLogFilterTest.java</include>
<include>**/server/master/consumer/TaskPriorityQueueConsumerTest.java</include>
<include>**/server/master/runner/MasterTaskExecThreadTest.java</include>
<!--<include>**/server/master/dispatch/executor/NettyExecutorManagerTest.java</include>-->
<include>**/server/master/dispatch/host/assign/LowerWeightRoundRobinTest.java</include>
<include>**/server/master/dispatch/host/assign/RandomSelectorTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinSelectorTest.java</include>
<include>**/server/master/register/MasterRegistryTest.java</include>
<include>**/server/master/dispatch/host/assign/RoundRobinHostManagerTest.java</include>
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<include>**/server/master/ConditionsTaskTest.java</include>
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<include>**/server/master/SubProcessTaskTest.java</include>
<include>**/server/master/processor/TaskAckProcessorTest.java</include>
<include>**/server/master/processor/TaskKillResponseProcessorTest.java</include>
<include>**/server/master/processor/queue/TaskResponseServiceTest.java</include>
<include>**/server/register/ZookeeperNodeManagerTest.java</include>
<include>**/server/utils/DataxUtilsTest.java</include>
<include>**/server/utils/ExecutionContextTestUtils.java</include>
<include>**/server/utils/HostTest.java</include>
<include>**/server/utils/FlinkArgsUtilsTest.java</include>
<include>**/server/utils/LogUtilsTest.java</include>
<include>**/server/utils/ParamUtilsTest.java</include>
<include>**/server/utils/ProcessUtilsTest.java</include>
<include>**/server/utils/SparkArgsUtilsTest.java</include>
<include>**/server/worker/processor/TaskCallbackServiceTest.java</include>
<include>**/server/worker/registry/WorkerRegistryTest.java</include>
<include>**/server/worker/shell/ShellCommandExecutorTest.java</include>
<include>**/server/worker/sql/SqlExecutorTest.java</include>
<include>**/server/worker/task/spark/SparkTaskTest.java</include>
<include>**/server/worker/task/EnvFileTest.java</include>
<!--<include>**/server/worker/task/datax/DataxTaskTest.java</include>-->
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/task/shell/ShellTaskTest.java</include>
<include>**/server/worker/task/TaskManagerTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/server/worker/runner/TaskExecuteThreadTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/process/ProcessServiceTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>
<include>**/service/zk/CuratorZookeeperClientTest.java</include>
<include>**/service/queue/TaskUpdateQueueTest.java</include>
<include>**/service/queue/PeerTaskInstancePriorityQueueTest.java</include>
<include>**/service/alert/AlertClientServiceTest.java</include>
<include>**/dao/mapper/DataSourceUserMapperTest.java</include>
<!--<include>**/dao/mapper/ErrorCommandMapperTest.java</include>-->
<include>**/dao/mapper/ProcessDefinitionMapperTest.java</include>
<include>**/dao/mapper/ProcessDefinitionVersionMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapMapperTest.java</include>
<include>**/dao/mapper/ProcessInstanceMapperTest.java</include>
<include>**/dao/mapper/ProjectMapperTest.java</include>
<include>**/dao/mapper/ProjectUserMapperTest.java</include>
<include>**/dao/mapper/QueueMapperTest.java</include>
<include>**/dao/mapper/ResourceUserMapperTest.java</include>
<include>**/dao/mapper/ScheduleMapperTest.java</include>
<include>**/dao/mapper/SessionMapperTest.java</include>
<include>**/dao/mapper/TaskInstanceMapperTest.java</include>
<include>**/dao/mapper/TenantMapperTest.java</include>
<include>**/dao/mapper/UdfFuncMapperTest.java</include>
<include>**/dao/mapper/UDFUserMapperTest.java</include>
<include>**/dao/mapper/UserMapperTest.java</include>
<include>**/dao/mapper/AlertPluginInstanceMapperTest.java</include>
<include>**/dao/mapper/PluginDefineTest.java</include>
<include>**/dao/utils/DagHelperTest.java</include>
<include>**/dao/AlertDaoTest.java</include>
<include>**/dao/datasource/OracleDataSourceTest.java</include>
<include>**/dao/datasource/HiveDataSourceTest.java</include>
<include>**/dao/upgrade/ProcessDefinitionDaoTest.java</include>
<include>**/dao/upgrade/WokrerGrouopDaoTest.java</include>
<include>**/dao/upgrade/UpgradeDaoTest.java</include>
<include>**/plugin/alert/email/EmailAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/email/EmailAlertChannelTest.java</include>
<include>**/plugin/alert/email/ExcelUtilsTest.java</include>
<include>**/plugin/alert/email/MailUtilsTest.java</include>
<include>**/plugin/alert/email/template/DefaultHTMLTemplateTest.java</include>
<include>**/plugin/alert/dingtalk/DingTalkSenderTest.java</include>
<include>**/plugin/alert/dingtalk/DingTalkAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/wechat/WeChatSenderTest.java</include>
<include>**/plugin/alert/wechat/WeChatAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/script/ProcessUtilsTest.java</include>
<include>**/plugin/alert/script/ScriptAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/script/ScriptSenderTest.java</include>
<include>**/plugin/alert/http/HttpAlertChannelFactoryTest.java</include>
<include>**/plugin/alert/http/HttpAlertChannelTest.java</include>
<include>**/plugin/alert/http/HttpAlertPluginTest.java</include>
<include>**/plugin/alert/http/HttpSenderTest.java</include>
<include>**/spi/params/PluginParamsTransferTest.java</include>
<!--<include>**/alert/plugin/EmailAlertPluginTest.java</include>-->
<include>**/alert/plugin/AlertPluginManagerTest.java</include>
<include>**/alert/plugin/DolphinPluginLoaderTest.java</include>
<include>**/alert/utils/DingTalkUtilsTest.java</include>
<include>**/alert/utils/EnterpriseWeChatUtilsTest.java</include>
<include>**/alert/utils/FuncUtilsTest.java</include>
<include>**/alert/processor/AlertRequestProcessorTest.java</include>
<include>**/alert/runner/AlertSenderTest.java</include>
<include>**/alert/AlertServerTest.java</include>
</includes>
<!-- <skip>true</skip> -->
</configuration>
</plugin>
<!-- jenkins plugin jacoco report-->
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>${jacoco.version}</version>
<configuration>
<destFile>target/jacoco.exec</destFile>
<dataFile>target/jacoco.exec</dataFile>
</configuration>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<version>${spotbugs.version}</version>
<configuration>
<xmlOutput>true</xmlOutput>
<threshold>medium</threshold>
<effort>default</effort>
<excludeFilterFile>dev-config/spotbugs-exclude.xml</excludeFilterFile>
<failOnError>true</failOnError>
</configuration>
<dependencies>
<dependency>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs</artifactId>
<version>4.0.0-beta4</version>
</dependency>
</dependencies>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
<version>${checkstyle.version}</version>
<dependencies>
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
<version>8.18</version>
</dependency>
</dependencies>
<configuration>
<consoleOutput>true</consoleOutput>
<encoding>UTF-8</encoding>
<configLocation>style/checkstyle.xml</configLocation>
<suppressionsLocation>style/checkstyle-suppressions.xml</suppressionsLocation>
<suppressionsFileExpression>checkstyle.suppressions.file</suppressionsFileExpression>
<failOnViolation>true</failOnViolation>
<violationSeverity>warning</violationSeverity>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<sourceDirectories>
<sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
</sourceDirectories>
<excludes>**\/generated-sources\/</excludes>
<skip>true</skip>
</configuration>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>cobertura-maven-plugin</artifactId>
<version>${cobertura-maven-plugin.version}</version>
<configuration>
<check>
</check>
<aggregate>true</aggregate>
<outputDirectory>./target/cobertura</outputDirectory>
<encoding>${project.build.sourceEncoding}</encoding>
<quiet>true</quiet>
<format>xml</format>
<instrumentation>
<ignoreTrivial>true</ignoreTrivial>
</instrumentation>
</configuration>
</plugin>
</plugins>
</build>
<modules>
<module>dolphinscheduler-alert-plugin</module>
<module>dolphinscheduler-ui</module>
<module>dolphinscheduler-server</module>
<module>dolphinscheduler-common</module>
<module>dolphinscheduler-api</module>
<module>dolphinscheduler-dao</module>
<module>dolphinscheduler-alert</module>
<module>dolphinscheduler-dist</module>
<module>dolphinscheduler-remote</module>
<module>dolphinscheduler-service</module>
<module>dolphinscheduler-spi</module>
<module>dolphinscheduler-microbench</module>
</modules>
</project>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,506 | There is a vulnerability in beanutils 1.7.0, upgrade recommended | https://github.com/apache/incubator-dolphinscheduler/blob/17c06ce966fc5c6a6136ee142e4698312fe6532f/pom.xml#L84
CVE-2014-0114 CVE-2019-10086
Recommended upgrade version:1.8.0~beta-1 | https://github.com/apache/dolphinscheduler/issues/4506 | https://github.com/apache/dolphinscheduler/pull/4525 | 829fdb52c1769030113b450b0b98ffecfe3693f8 | a5f31b75abee3ef12647dbb961013bd941bc78f1 | "2021-01-20T05:46:36Z" | java | "2021-01-25T05:43:06Z" | tools/dependencies/known-dependencies.txt | HikariCP-3.2.0.jar
animal-sniffer-annotations-1.14.jar
aopalliance-1.0.jar
apache-el-8.5.54.jar
apacheds-i18n-2.0.0-M15.jar
apacheds-kerberos-codec-2.0.0-M15.jar
api-asn1-api-1.0.0-M20.jar
api-util-1.0.0-M20.jar
asm-3.1.jar
aspectjweaver-1.9.6.jar
audience-annotations-0.5.0.jar
avro-1.7.4.jar
aws-java-sdk-1.7.4.jar
bonecp-0.8.0.RELEASE.jar
byte-buddy-1.9.16.jar
checker-compat-qual-2.0.0.jar
classmate-1.4.0.jar
clickhouse-jdbc-0.1.52.jar
commons-cli-1.2.jar
commons-codec-1.11.jar
commons-collections-3.2.2.jar
commons-collections4-4.1.jar
commons-compress-1.4.1.jar
commons-compiler-3.0.16.jar
commons-configuration-1.10.jar
commons-daemon-1.0.13.jar
commons-beanutils-1.7.0.jar
commons-dbcp-1.4.jar
commons-httpclient-3.0.1.jar
commons-io-2.4.jar
commons-lang-2.6.jar
commons-logging-1.1.1.jar
commons-math3-3.1.1.jar
commons-net-3.1.jar
commons-pool-1.6.jar
cron-utils-5.0.5.jar
curator-client-4.3.0.jar
curator-framework-4.3.0.jar
curator-recipes-4.3.0.jar
datanucleus-api-jdo-4.2.1.jar
datanucleus-core-4.1.6.jar
datanucleus-rdbms-4.1.7.jar
derby-10.14.2.0.jar
error_prone_annotations-2.1.3.jar
druid-1.1.22.jar
gson-2.8.6.jar
guava-24.1-jre.jar
guava-retrying-2.0.0.jar
guice-3.0.jar
guice-servlet-3.0.jar
h2-1.4.200.jar
hadoop-annotations-2.7.3.jar
hadoop-auth-2.7.3.jar
hadoop-aws-2.7.3.jar
hadoop-client-2.7.3.jar
hadoop-common-2.7.3.jar
hadoop-hdfs-2.7.3.jar
hadoop-mapreduce-client-app-2.7.3.jar
hadoop-mapreduce-client-common-2.7.3.jar
hadoop-mapreduce-client-core-2.7.3.jar
hadoop-mapreduce-client-jobclient-2.7.3.jar
hadoop-mapreduce-client-shuffle-2.7.3.jar
hadoop-yarn-api-2.7.3.jar
hadoop-yarn-client-2.7.3.jar
hadoop-yarn-common-2.7.3.jar
hadoop-yarn-server-common-2.7.3.jar
hamcrest-core-1.3.jar
hibernate-validator-6.0.20.Final.jar
hive-common-2.1.0.jar
hive-jdbc-2.1.0.jar
hive-metastore-2.1.0.jar
hive-orc-2.1.0.jar
hive-serde-2.1.0.jar
hive-service-2.1.0.jar
hive-service-rpc-2.1.0.jar
hive-storage-api-2.1.0.jar
htrace-core-3.1.0-incubating.jar
httpclient-4.4.1.jar
httpcore-4.4.1.jar
httpmime-4.5.12.jar
j2objc-annotations-1.1.jar
jackson-annotations-2.10.5.jar
jackson-core-2.10.5.jar
jackson-core-asl-1.9.13.jar
jackson-databind-2.10.5.jar
jackson-datatype-jdk8-2.9.10.jar
jackson-datatype-jsr310-2.9.10.jar
jackson-jaxrs-1.9.13.jar
jackson-mapper-asl-1.9.13.jar
jackson-module-parameter-names-2.9.10.jar
jackson-xc-1.9.13.jar
jamon-runtime-2.3.1.jar
janino-3.0.16.jar
java-xmlbuilder-0.4.jar
javax.activation-api-1.2.0.jar
javax.annotation-api-1.3.2.jar
javax.inject-1.jar
javax.jdo-3.2.0-m3.jar
javax.servlet-api-3.1.0.jar
javolution-5.5.1.jar
jaxb-api-2.3.1.jar
jaxb-impl-2.2.3-1.jar
jboss-logging-3.3.3.Final.jar
jdo-api-3.0.1.jar
jersey-client-1.9.jar
jersey-core-1.9.jar
jersey-guice-1.9.jar
jersey-json-1.9.jar
jersey-server-1.9.jar
jets3t-0.9.0.jar
jettison-1.1.jar
jetty-6.1.26.jar
jetty-continuation-9.4.31.v20200723.jar
jetty-http-9.4.31.v20200723.jar
jetty-io-9.4.31.v20200723.jar
jetty-security-9.4.31.v20200723.jar
jetty-server-9.4.31.v20200723.jar
jetty-servlet-9.4.31.v20200723.jar
jetty-servlets-9.4.31.v20200723.jar
jetty-util-6.1.26.jar
jetty-util-9.4.31.v20200723.jar
jetty-webapp-9.4.31.v20200723.jar
jetty-xml-9.4.31.v20200723.jar
jline-0.9.94.jar
jna-4.5.2.jar
jna-platform-4.5.2.jar
joda-time-2.10.6.jar
jpam-1.1.jar
jsch-0.1.42.jar
jsp-api-2.1.jar
jsqlparser-2.1.jar
jsr305-3.0.0.jar
jta-1.1.jar
jul-to-slf4j-1.7.30.jar
junit-4.12.jar
leveldbjni-all-1.8.jar
libfb303-0.9.3.jar
libthrift-0.9.3.jar
log4j-1.2-api-2.11.2.jar
log4j-1.2.17.jar
log4j-api-2.11.2.jar
log4j-core-2.11.2.jar
logback-classic-1.2.3.jar
logback-core-1.2.3.jar
lz4-1.3.0.jar
mapstruct-1.2.0.Final.jar
mssql-jdbc-6.1.0.jre8.jar
mybatis-3.5.2.jar
mybatis-plus-3.2.0.jar
mybatis-plus-annotation-3.2.0.jar
mybatis-plus-boot-starter-3.2.0.jar
mybatis-plus-core-3.2.0.jar
mybatis-plus-extension-3.2.0.jar
mybatis-spring-2.0.2.jar
netty-3.6.2.Final.jar
netty-all-4.1.52.Final.jar
opencsv-2.3.jar
oshi-core-3.5.0.jar
paranamer-2.3.jar
parquet-hadoop-bundle-1.8.1.jar
poi-3.17.jar
postgresql-42.1.4.jar
presto-jdbc-0.238.1.jar
protobuf-java-2.5.0.jar
quartz-2.3.0.jar
quartz-jobs-2.3.0.jar
slf4j-api-1.7.5.jar
snakeyaml-1.23.jar
snappy-0.2.jar
snappy-java-1.0.4.1.jar
spring-aop-5.1.18.RELEASE.jar
spring-beans-5.1.18.RELEASE.jar
spring-boot-2.1.17.RELEASE.jar
spring-boot-autoconfigure-2.1.17.RELEASE.jar
spring-boot-starter-2.1.17.RELEASE.jar
spring-boot-starter-aop-2.1.17.RELEASE.jar
spring-boot-starter-jdbc-2.1.17.RELEASE.jar
spring-boot-starter-jetty-2.1.17.RELEASE.jar
spring-boot-starter-json-2.1.17.RELEASE.jar
spring-boot-starter-logging-2.1.17.RELEASE.jar
spring-boot-starter-web-2.1.17.RELEASE.jar
spring-context-5.1.18.RELEASE.jar
spring-core-5.1.18.RELEASE.jar
spring-expression-5.1.18.RELEASE.jar
spring-jcl-5.1.18.RELEASE.jar
spring-jdbc-5.1.18.RELEASE.jar
spring-plugin-core-1.2.0.RELEASE.jar
spring-plugin-metadata-1.2.0.RELEASE.jar
spring-tx-5.1.18.RELEASE.jar
spring-web-5.1.18.RELEASE.jar
spring-webmvc-5.1.18.RELEASE.jar
springfox-core-2.9.2.jar
springfox-schema-2.9.2.jar
springfox-spi-2.9.2.jar
springfox-spring-web-2.9.2.jar
springfox-swagger-common-2.9.2.jar
springfox-swagger-ui-2.9.2.jar
springfox-swagger2-2.9.2.jar
swagger-annotations-1.5.20.jar
swagger-bootstrap-ui-1.9.3.jar
swagger-models-1.5.24.jar
tephra-api-0.6.0.jar
threetenbp-1.3.6.jar
transaction-api-1.1.jar
validation-api-2.0.1.Final.jar
xercesImpl-2.9.1.jar
xml-apis-1.4.01.jar
xmlenc-0.52.jar
xz-1.0.jar
zookeeper-3.4.14.jar
|
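Both CVEs cited in the row above stem from BeanUtils exposing the `class` property during bean introspection. Upgrading the jar is the real fix; for context, commons-beanutils 1.9.2+ also lets a caller suppress that property at runtime. A minimal sketch, assuming a 1.9.x jar on the classpath (illustrative only, not code from this repository):

```java
import org.apache.commons.beanutils.BeanUtilsBean;
import org.apache.commons.beanutils.SuppressPropertiesBeanIntrospector;

public class BeanUtilsHardening {
    public static void main(String[] args) {
        // Suppress the "class" property so bean paths cannot reach the
        // ClassLoader, the vector behind CVE-2014-0114 / CVE-2019-10086.
        BeanUtilsBean.getInstance().getPropertyUtils()
                .addBeanIntrospector(SuppressPropertiesBeanIntrospector.SUPPRESS_CLASS);
    }
}
```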
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,592 | [Improvement][api] Worker group data returned incorrectly |
![image](https://user-images.githubusercontent.com/55787491/105975589-16604400-60ca-11eb-91b8-83fd8568673e.png)
![image](https://user-images.githubusercontent.com/55787491/105975823-6212ed80-60ca-11eb-91a5-772c93a899bb.png)
**Which version of DolphinScheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4592 | https://github.com/apache/dolphinscheduler/pull/4620 | 658376ad363dcafafe1054f8babfc5bb5ffe60c0 | afd201e6fa50700cf16aad2a292e61c4e5b6e5f9 | "2021-01-27T10:06:43Z" | java | "2021-01-29T08:54:39Z" | ambari_plugin/common-services/DOLPHIN/1.3.3/configuration/dolphin-worker.xml | <!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<configuration>
<property>
<name>worker.exec.threads</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker execute thread num</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.heartbeat.interval</name>
<value>10</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker heartbeat interval</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.max.cpuload.avg</name>
<value>100</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>the worker server can work only when the CPU load average is less than this value. default value: the number of CPU cores * 2</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.reserved.memory</name>
<value>0.3</value>
<description>the worker server can work only when the available memory is larger than this reserved value. default value: physical memory * 1/10, unit is G.</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.listen.port</name>
<value>1234</value>
<value-attributes>
<type>int</type>
</value-attributes>
<description>worker listen port</description>
<on-ambari-upgrade add="true"/>
</property>
<property>
<name>worker.groups</name>
<value>default</value>
<description>default worker group</description>
<on-ambari-upgrade add="true"/>
</property>
</configuration> |
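To connect the `worker.groups` setting above with the service code in the next row: each configured group becomes a child path under the workers node in ZooKeeper, the worker registers its `host:port` beneath it, and the node value is a comma-separated heartbeat whose 7th and 8th fields are parsed back as create/update times. A rough sketch of the path layout; the root constant here is a simplification, not the project's actual constant:

```java
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class WorkerRegistryPathSketch {

    // Simplified stand-in for the real registry root constant (assumption).
    private static final String WORKERS_ROOT = "/dolphinscheduler/nodes/worker";

    /** One registry path per configured group, e.g. worker.groups=default. */
    static Stream<String> registryPaths(String workerGroups, String host, int port) {
        return Stream.of(workerGroups.split(","))
                .map(String::trim)
                .map(group -> WORKERS_ROOT + "/" + group + "/" + host + ":" + port);
    }

    public static void main(String[] args) {
        System.out.println(registryPaths("default", "192.168.0.1", 1234)
                .collect(Collectors.toList()));
        // [/dolphinscheduler/nodes/worker/default/192.168.0.1:1234]
    }
}
```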
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,592 | [Improvement][api] Worker group data returned incorrectly |
![image](https://user-images.githubusercontent.com/55787491/105975589-16604400-60ca-11eb-91b8-83fd8568673e.png)
![image](https://user-images.githubusercontent.com/55787491/105975823-6212ed80-60ca-11eb-91a5-772c93a899bb.png)
**Which version of DolphinScheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4592 | https://github.com/apache/dolphinscheduler/pull/4620 | 658376ad363dcafafe1054f8babfc5bb5ffe60c0 | afd201e6fa50700cf16aad2a292e61c4e5b6e5f9 | "2021-01-27T10:06:43Z" | java | "2021-01-29T08:54:39Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/WorkerGroupService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* worker group service
*/
@Service
public class WorkerGroupService extends BaseService {
private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException";
@Autowired
protected ZookeeperCachedOperator zookeeperCachedOperator;
@Autowired
ProcessInstanceMapper processInstanceMapper;
/**
* query worker group paging
*
* @param loginUser login user
* @param pageNo page number
* @param searchVal search value
* @param pageSize page size
* @return worker group list page
*/
public Map<String, Object> queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) {
// list from index
Integer fromIndex = (pageNo - 1) * pageSize;
// list to index
Integer toIndex = (pageNo - 1) * pageSize + pageSize;
Map<String, Object> result = new HashMap<>();
if (isNotAdmin(loginUser, result)) {
return result;
}
List<WorkerGroup> workerGroups = getWorkerGroups(true);
List<WorkerGroup> resultDataList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(workerGroups)) {
List<WorkerGroup> searchValDataList = new ArrayList<>();
if (StringUtils.isNotEmpty(searchVal)) {
for (WorkerGroup workerGroup : workerGroups) {
if (workerGroup.getName().contains(searchVal)) {
searchValDataList.add(workerGroup);
}
}
} else {
searchValDataList = workerGroups;
}
if (searchValDataList.size() < pageSize) {
toIndex = (pageNo - 1) * pageSize + searchValDataList.size();
}
resultDataList = searchValDataList.subList(fromIndex, toIndex);
}
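// NOTE (#4592): subList(fromIndex, toIndex) throws IndexOutOfBoundsException once
// fromIndex passes the size of the filtered list, and the page slice below is also
// reused as the total count, under-reporting how many worker groups exist.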
PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount(resultDataList.size());
pageInfo.setLists(resultDataList);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query all worker group
*
* @return all worker group list
*/
public Map<String, Object> queryAllGroup() {
Map<String, Object> result = new HashMap<>();
List<WorkerGroup> workerGroups = getWorkerGroups(false);
Set<String> availableWorkerGroupSet = workerGroups.stream()
.map(WorkerGroup::getName)
.collect(Collectors.toSet());
result.put(Constants.DATA_LIST, availableWorkerGroupSet);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get worker groups
*
* @param isPaging whether paging
* @return WorkerGroup list
*/
private List<WorkerGroup> getWorkerGroups(boolean isPaging) {
String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS;
List<WorkerGroup> workerGroups = new ArrayList<>();
List<String> workerGroupList;
try {
workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath);
} catch (Exception e) {
if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) {
if (isPaging) {
return workerGroups;
} else {
// ignore NoNodeException and return the default worker group
WorkerGroup wg = new WorkerGroup();
wg.setName(DEFAULT_WORKER_GROUP);
workerGroups.add(wg);
return workerGroups;
}
} else {
throw e;
}
}
// available workerGroup list
List<String> availableWorkerGroupList = new ArrayList<>();
for (String workerGroup : workerGroupList) {
String workerGroupPath = workerPath + "/" + workerGroup;
List<String> childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath);
if (CollectionUtils.isNotEmpty(childrenNodes)) {
availableWorkerGroupList.add(workerGroup);
WorkerGroup wg = new WorkerGroup();
wg.setName(workerGroup);
if (isPaging) {
wg.setIpList(childrenNodes);
String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0));
wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6]));
wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[7]));
}
workerGroups.add(wg);
}
}
return workerGroups;
}
}
|
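The paging method above derives both the returned slice and the reported total from the same `resultDataList`, and calls `subList` with an unclamped window, which matches the incorrect paging data reported in #4592. A safer in-memory windowing sketch (the helper and names are mine, not the project's API):

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public final class PageWindow {

    /** Returns the 1-based page slice; take the total count from items.size(),
     *  never from the returned slice. */
    static <T> List<T> page(List<T> items, int pageNo, int pageSize) {
        int from = Math.max(0, (pageNo - 1) * pageSize);
        if (from >= items.size()) {
            return Collections.emptyList(); // past the end: empty page, no exception
        }
        int to = Math.min(items.size(), from + pageSize);
        return items.subList(from, to);
    }

    public static void main(String[] args) {
        List<Integer> all = Arrays.asList(1, 2, 3, 4, 5);
        System.out.println(page(all, 2, 3)); // [4, 5]
        System.out.println(page(all, 9, 3)); // []
    }
}
```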
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,648 | [Bug][UI] TypeError: Cannot read property 'id' of undefined in createUser | **For better global communication, please describe it in English. If you feel the description in English is not clear, then you can append a description in Chinese (just for Mandarin (CN)), thx!**
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior, for example:
0. Init bare database
1. Go to 'User Manage'
2. Click on 'Edit User'
3. See error
**Expected behavior**
TypeError fixed.
**Screenshots**
If applicable, add screenshots to help explain your problem.
![image](https://user-images.githubusercontent.com/4902714/106452923-82281f80-64c3-11eb-84de-b72ecaa5f352.png)
**Which version of Dolphin Scheduler:**
-[1.3.4]
-[dev]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please describe your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/4648 | https://github.com/apache/dolphinscheduler/pull/4649 | 6a49d8163e333b27ce941cabdd3957aeb1779e14 | 8419ad798c76ef5c1fec07c120e56ca78f892cf5 | "2021-02-01T11:27:55Z" | java | "2021-02-02T05:46:02Z" | dolphinscheduler-ui/src/js/conf/home/pages/security/pages/users/_source/createUser.vue | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<m-popover
ref="popover"
:ok-text="item ? $t('Edit') : $t('Submit')"
@ok="_ok"
@close="close">
<template slot="content">
<div class="create-user-model">
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('User Name')}}</template>
<template slot="content">
<el-input
type="input"
v-model="userName"
maxlength="60"
size="small"
:placeholder="$t('Please enter user name')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f v-if="router.history.current.name !== 'account'">
<template slot="name"><strong>*</strong>{{$t('Password')}}</template>
<template slot="content">
<el-input
type="password"
v-model="userPassword"
size="small"
:placeholder="$t('Please enter your password')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f v-if="isADMIN">
<template slot="name"><strong>*</strong>{{$t('Tenant')}}</template>
<template slot="content">
<el-select v-model="tenantId" style="width: 100%;" size="small">
<el-option
v-for="city in tenantList"
:key="city.id"
:value="city.id"
:label="city.code">
</el-option>
</el-select>
</template>
</m-list-box-f>
<m-list-box-f v-if="isADMIN">
<template slot="name">{{$t('Queue')}}</template>
<template slot="content">
<el-select v-model="queueName" style="width: 100%;" size="small">
<el-input slot="trigger" slot-scope="{ selectedModel }" readonly :placeholder="$t('Please select a queue')" :value="selectedModel ? selectedModel.label : ''" @on-click-icon.stop="queueName = ''">
<em slot="suffix" class="el-icon-error" style="font-size: 15px;cursor: pointer;" v-show="queueName ==''"></em>
<em slot="suffix" class="el-icon-bottom" style="font-size: 12px;" v-show="queueName!=''"></em>
</el-input>
<el-option
v-for="city in queueList"
:key="city.id"
:value="city.id"
:label="city.code">
</el-option>
</el-select>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name"><strong>*</strong>{{$t('Email')}}</template>
<template slot="content">
<el-input
type="input"
v-model="email"
size="small"
:placeholder="$t('Please enter email')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f>
<template slot="name">{{$t('Phone')}}</template>
<template slot="content">
<el-input
type="input"
v-model="phone"
size="small"
:placeholder="$t('Please enter phone number')">
</el-input>
</template>
</m-list-box-f>
<m-list-box-f style="line-height: 38px;">
<template slot="name">{{$t('State')}}</template>
<template slot="content">
<el-radio-group v-model="userState" size="small">
<el-radio :label="'1'">{{$t('Enable')}}</el-radio>
<el-radio :label="'0'">{{$t('Disable')}}</el-radio>
</el-radio-group>
</template>
</m-list-box-f>
</div>
</template>
</m-popover>
</template>
<script>
import _ from 'lodash'
import i18n from '@/module/i18n'
import store from '@/conf/home/store'
import router from '@/conf/home/router'
import mPopover from '@/module/components/popup/popover'
import mListBoxF from '@/module/components/listBoxF/listBoxF'
export default {
name: 'create-user',
data () {
return {
store,
router,
queueList: [],
userName: '',
userPassword: '',
tenantId: '',
queueName: '',
email: '',
phone: '',
userState: '1',
tenantList: [],
// Whether the current user is an admin (not shown on the account page)
isADMIN: store.state.user.userInfo.userType === 'ADMIN_USER' && router.history.current.name !== 'account'
}
},
props: {
item: Object
},
methods: {
_ok () {
if (this._verification()) {
// Skip the name check when the name is unchanged
if (this.item && this.item.groupName === this.groupName) {
this._submit()
return
}
// Verify username
this.store.dispatch('security/verifyName', {
type: 'user',
userName: this.userName
}).then(res => {
this._submit()
}).catch(e => {
this.$message.error(e.msg || '')
})
}
},
_verification () {
let regEmail = /^([a-zA-Z0-9]+[_|\-|\.]?)*[a-zA-Z0-9]+@([a-zA-Z0-9]+[_|\-|\.]?)*[a-zA-Z0-9]+\.[a-zA-Z]{2,}$/ // eslint-disable-line
// Mobile phone number regular
let regPhone = /^1(3|4|5|6|7|8)\d{9}$/; // eslint-disable-line
let regPassword = /^(?![0-9]+$)(?![a-z]+$)(?![A-Z]+$)(?![`~!@#$%^&*()_\-+=<>?:"{}|,./;'\\[\]·~!@#¥%……&*()——\-+={}|《》?:“”【】、;‘’,。、]+$)[`~!@#$%^&*()_\-+=<>?:"{}|,./;'\\[\]·~!@#¥%……&*()——\-+={}|《》?:“”【】、;‘’,。、0-9A-Za-z]{6,22}$/
let userNameLength = this.userName.length
// user name
if (userNameLength < 3 || userNameLength > 39) {
this.$message.warning(`${i18n.$t('User name length is between 3 and 39')}`)
return false
}
if (!this.userName.replace(/\s*/g, '')) {
this.$message.warning(`${i18n.$t('Please enter user name')}`)
return false
}
// password
if (this.userPassword !== '' && this.item) {
if (!regPassword.test(this.userPassword)) {
this.$message.warning(`${i18n.$t('Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22')}`)
return false
}
} else if (!this.item) {
if (!regPassword.test(this.userPassword)) {
this.$message.warning(`${i18n.$t('Password consists of at least two combinations of numbers, letters, and characters, and the length is between 6-22')}`)
return false
}
}
// email
if (!this.email) {
this.$message.warning(`${i18n.$t('Please enter email')}`)
return false
}
// Verify email
if (!regEmail.test(this.email)) {
this.$message.warning(`${i18n.$t('Please enter the correct email format')}`)
return false
}
// Verify phone
if (this.phone) {
if (!regPhone.test(this.phone)) {
this.$message.warning(`${i18n.$t('Please enter the correct mobile phone format')}`)
return false
}
}
return true
},
_getQueueList () {
return new Promise((resolve, reject) => {
this.store.dispatch('security/getQueueList').then(res => {
this.queueList = _.map(res, v => {
return {
id: v.id,
code: v.queueName
}
})
this.$nextTick(() => {
this.queueName = this.queueList[0].id
})
resolve()
})
})
},
_getTenantList () {
return new Promise((resolve, reject) => {
this.store.dispatch('security/getTenantList').then(res => {
let arr = _.filter(res, (o) => {
return o.id !== -1
})
this.tenantList = _.map(arr, v => {
return {
id: v.id,
code: v.tenantCode
}
})
this.$nextTick(() => {
this.tenantId = this.tenantList[0].id
})
resolve()
})
})
},
_submit () {
this.$refs.popover.spinnerLoading = true
let queueCode = ''
// get queue code
if (this.queueName !== '') {
queueCode = this.queueList.length > 0 ? _.find(this.queueList, ['id', this.queueName]).code : ''
}
let param = {
userName: this.userName,
userPassword: this.userPassword,
tenantId: this.tenantId,
email: this.email,
queue: queueCode,
phone: this.phone,
state: this.userState
}
if (this.item) {
param.id = this.item.id
}
this.store.dispatch(`security/${this.item ? 'updateUser' : 'createUser'}`, param).then(res => {
this.$refs.popover.spinnerLoading = false
this.$emit('onUpdate', param)
this.$message.success(res.msg)
}).catch(e => {
this.$message.error(e.msg || '')
this.$refs.popover.spinnerLoading = false
})
},
close () {
this.$emit('close')
}
},
watch: {},
created () {
// Administrator gets tenant list
if (this.isADMIN) {
Promise.all([this._getQueueList(), this._getTenantList()]).then(() => {
if (this.item) {
this.userName = this.item.userName
this.userPassword = ''
this.email = this.item.email
this.phone = this.item.phone
this.userState = this.item.state + '' || '1'
this.tenantId = this.item.tenantId
this.$nextTick(() => {
this.queueName = _.find(this.queueList, ['code', this.item.queue]).id || ''
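// NOTE (#4648): _.find returns undefined when no entry in queueList matches
// this.item.queue (e.g. a user created against a bare database), so reading
// `.id` throws the TypeError shown in this issue. A defensive lookup would be
// (sketch only, not the merged fix):
//   const matched = _.find(this.queueList, ['code', this.item.queue])
//   this.queueName = matched ? matched.id : ''
// The same unguarded pattern appears again in the non-admin branch below.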
})
}
})
} else {
if (this.item) {
this.userName = this.item.userName
this.userPassword = ''
this.email = this.item.email
this.phone = this.item.phone
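// note: unlike the admin branch above, which reads this.item.state, the next
// line reads this.state (not declared in data()), so `this.state + ''` yields
// the string 'undefined' rather than the stored state.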
this.userState = this.state + '' || '1'
this.tenantId = this.item.tenantId
if (this.queueList.length > 0) {
this.queueName = _.find(this.queueList, ['code', this.item.queue]).id
} else {
this.queueName = ''
}
}
}
},
mounted () {
},
components: { mPopover, mListBoxF }
}
</script>
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,101 | [BUG] After renaming the branch name, the name of Branch Flow does not change | After a task is renamed, the branch flow still shows the task's name from before the change.
![image](https://user-images.githubusercontent.com/55787491/76071071-9e2ff200-5fd0-11ea-8702-e7b357d7b060.png)
**Which version of Dolphin Scheduler:**
-[1.2.0_release]
| https://github.com/apache/dolphinscheduler/issues/2101 | https://github.com/apache/dolphinscheduler/pull/4555 | cfd9db5555b3d8f9f33f25a9272c8741115a9336 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | "2020-03-06T09:35:19Z" | java | "2021-02-02T08:07:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ProcessInstanceService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST;
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT;
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE;
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST;
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto;
import org.apache.dolphinscheduler.api.dto.gantt.Task;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
/**
* process instance service
*/
@Service
public class ProcessInstanceService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(ProcessInstanceService.class);
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessService processService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefineMapper;
@Autowired
ProcessDefinitionService processDefinitionService;
@Autowired
ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
ExecutorService execService;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
LoggerService loggerService;
@Autowired
UsersService usersService;
/**
* return the top n SUCCESS process instances, ordered by running time, that started between startTime and endTime
*/
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
if (0 > size) {
putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size);
return result;
}
if (Objects.isNull(startTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME);
return result;
}
Date start = DateUtils.stringToDate(startTime);
if (Objects.isNull(endTime)) {
putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME);
return result;
}
Date end = DateUtils.stringToDate(endTime);
if (start == null || end == null) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
return result;
}
if (start.getTime() > end.getTime()) {
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime);
return result;
}
List<ProcessInstance> processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS);
result.put(DATA_LIST, processInstances);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process instance by id
*
* @param loginUser login user
* @param projectName project name
* @param processId process instance id
* @return process instance detail
*/
public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
processInstance.setWarningGroupId(processDefinition.getWarningGroupId());
result.put(DATA_LIST, processInstance);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* paging query process instance list, filtering according to project, process definition, time range, keyword, process status
*
* @param loginUser login user
* @param projectName project name
* @param pageNo page number
* @param pageSize page size
* @param processDefineId process definition id
* @param searchVal search value
* @param stateType state type
* @param host host
* @param startDate start time
* @param endDate end time
* @return process instance list
*/
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId,
String startDate, String endDate,
String searchVal, String executorName, ExecutionStatus stateType, String host,
Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
int[] statusArray = null;
// filter by state
if (stateType != null) {
statusArray = new int[]{stateType.ordinal()};
}
Date start = null;
Date end = null;
try {
if (StringUtils.isNotEmpty(startDate)) {
start = DateUtils.getScheduleDate(startDate);
}
if (StringUtils.isNotEmpty(endDate)) {
end = DateUtils.getScheduleDate(endDate);
}
} catch (Exception e) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "startDate,endDate");
return result;
}
Page<ProcessInstance> page = new Page<>(pageNo, pageSize);
PageInfo pageInfo = new PageInfo<ProcessInstance>(pageNo, pageSize);
int executorId = usersService.getUserIdByName(executorName);
IPage<ProcessInstance> processInstanceList =
processInstanceMapper.queryProcessInstanceListPaging(page,
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end);
List<ProcessInstance> processInstances = processInstanceList.getRecords();
for (ProcessInstance processInstance : processInstances) {
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
User executor = usersService.queryUser(processInstance.getExecutorId());
if (null != executor) {
processInstance.setExecutorName(executor.getUserName());
}
}
pageInfo.setTotalCount((int) processInstanceList.getTotal());
pageInfo.setLists(processInstances);
result.put(DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query task list by process instance id
*
* @param loginUser login user
* @param projectName project name
* @param processId process instance id
* @return task list for the process instance
* @throws IOException io exception
*/
public Map<String, Object> queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId);
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId);
addDependResultForTaskList(taskInstanceList);
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString());
resultMap.put(TASK_LIST, taskInstanceList);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* add dependent result for dependent task
*/
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException {
for (TaskInstance taskInstance : taskInstanceList) {
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) {
Result<String> logResult = loggerService.queryLog(
taskInstance.getId(), 0, 4098);
if (logResult.getCode() == Status.SUCCESS.ordinal()) {
String log = logResult.getData();
Map<String, DependResult> resultMap = parseLogForDependentResult(log);
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap));
}
}
}
}
public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
Map<String, DependResult> resultMap = new HashMap<>();
if (StringUtils.isEmpty(log)) {
return resultMap;
}
BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(log.getBytes(
StandardCharsets.UTF_8)), StandardCharsets.UTF_8));
String line;
while ((line = br.readLine()) != null) {
if (line.contains(DEPENDENT_SPLIT)) {
String[] tmpStringArray = line.split(":\\|\\|");
if (tmpStringArray.length != 2) {
continue;
}
String dependResultString = tmpStringArray[1];
String[] dependStringArray = dependResultString.split(",");
if (dependStringArray.length != 2) {
continue;
}
String key = dependStringArray[0].trim();
DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim());
resultMap.put(key, dependResult);
}
}
return resultMap;
}
/**
* query sub process instance detail info by task id
*
* @param loginUser login user
* @param projectName project name
* @param taskId task id
* @return sub process instance detail
*/
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
if (taskInstance == null) {
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId);
return result;
}
if (!taskInstance.isSubProcess()) {
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName());
return result;
}
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance(
taskInstance.getProcessInstanceId(), taskInstance.getId());
if (subWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("subProcessInstanceId", subWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* update process instance
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceJson process instance json
* @param processInstanceId process instance id
* @param scheduleTime schedule time
* @param syncDefine sync define
* @param flag flag
* @param locations locations
* @param connects connects
* @return update result code
* @throws ParseException parse exception for json parse
*/
public Map<String, Object> updateProcessInstance(User loginUser, String projectName, Integer processInstanceId,
String processInstanceJson, String scheduleTime, Boolean syncDefine,
Flag flag, String locations, String connects) throws ParseException {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
//check project permission
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
//check process instance exists
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
//check process instance status
if (!processInstance.getState().typeIsFinished()) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR,
processInstance.getName(), processInstance.getState().toString(), "update");
return result;
}
Date schedule = null;
if (scheduleTime != null) {
schedule = DateUtils.getScheduleDate(scheduleTime);
} else {
schedule = processInstance.getScheduleTime();
}
processInstance.setScheduleTime(schedule);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
String globalParams = null;
String originDefParams = null;
int timeout = processInstance.getTimeout();
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (StringUtils.isNotEmpty(processInstanceJson)) {
ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class);
//check workflow json is valid
Map<String, Object> checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson);
if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
originDefParams = JSONUtils.toJsonString(processData.getGlobalParams());
List<Property> globalParamList = processData.getGlobalParams();
Map<String, String> globalParamMap = Optional.ofNullable(globalParamList).orElse(Collections.emptyList()).stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList,
processInstance.getCmdTypeIfComplement(), schedule);
timeout = processData.getTimeout();
processInstance.setTimeout(timeout);
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(),
processDefinition.getUserId());
if (tenant != null) {
processInstance.setTenantCode(tenant.getTenantCode());
}
processInstance.setProcessInstanceJson(processInstanceJson);
processInstance.setGlobalParams(globalParams);
}
int update = processService.updateProcessInstance(processInstance);
int updateDefine = 1;
if (Boolean.TRUE.equals(syncDefine) && StringUtils.isNotEmpty(processInstanceJson)) {
processDefinition.setProcessDefinitionJson(processInstanceJson);
processDefinition.setGlobalParams(originDefParams);
processDefinition.setLocations(locations);
processDefinition.setConnects(connects);
processDefinition.setTimeout(timeout);
processDefinition.setUpdateTime(new Date());
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition);
processDefinition.setVersion(version);
updateDefine = processDefineMapper.updateById(processDefinition);
}
if (update > 0 && updateDefine > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* query parent process instance detail info by sub process instance id
*
* @param loginUser login user
* @param projectName project name
* @param subId sub process id
* @return parent instance detail
*/
public Map<String, Object> queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId);
if (subInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId);
return result;
}
if (subInstance.getIsSubProcess() == Flag.NO) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName());
return result;
}
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId);
if (parentWorkflowInstance == null) {
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST);
return result;
}
Map<String, Object> dataMap = new HashMap<>();
dataMap.put("parentWorkflowInstance", parentWorkflowInstance.getId());
result.put(DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* delete process instance by id, at the same time,delete task instance and their mapping relation data
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (null == processInstance) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
processService.removeTaskLogFile(processInstanceId);
// delete database cascade
int delete = processService.deleteWorkProcessInstanceById(processInstanceId);
processService.deleteAllSubWorkProcessByParentId(processInstanceId);
processService.deleteWorkProcessMapByParentId(processInstanceId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR);
}
return result;
}
/**
* view process instance variables
*
* @param processInstanceId process instance id
* @return variables data
*/
public Map<String, Object> viewVariables(Integer processInstanceId) {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
Map<String, String> timeParams = BusinessTimeUtils
.getBusinessTime(processInstance.getCmdTypeIfComplement(),
processInstance.getScheduleTime());
String workflowInstanceJson = processInstance.getProcessInstanceJson();
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class);
String userDefinedParams = processInstance.getGlobalParams();
// global params
List<Property> globalParams = new ArrayList<>();
if (userDefinedParams != null && userDefinedParams.length() > 0) {
globalParams = JSONUtils.toList(userDefinedParams, Property.class);
}
List<TaskNode> taskNodeList = workflowData.getTasks();
// global param string
String globalParamStr = JSONUtils.toJsonString(globalParams);
globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams);
globalParams = JSONUtils.toList(globalParamStr, Property.class);
for (Property property : globalParams) {
timeParams.put(property.getProp(), property.getValue());
}
// local params
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>();
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
Map<String, String> map = JSONUtils.toMap(parameter);
String localParams = map.get(LOCAL_PARAMS);
if (localParams != null && !localParams.isEmpty()) {
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams);
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class);
Map<String, Object> localParamsMap = new HashMap<>();
localParamsMap.put("taskType", taskNode.getType());
localParamsMap.put("localParamsList", localParamsList);
if (CollectionUtils.isNotEmpty(localParamsList)) {
localUserDefParams.put(taskNode.getName(), localParamsMap);
}
}
}
Map<String, Object> resultMap = new HashMap<>();
resultMap.put(GLOBAL_PARAMS, globalParams);
resultMap.put(LOCAL_PARAMS, localUserDefParams);
result.put(DATA_LIST, resultMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* encapsulation gantt structure
*
* @param processInstanceId process instance id
* @return gantt tree data
* @throws Exception exception when json parse
*/
public Map<String, Object> viewGantt(Integer processInstanceId) throws Exception {
Map<String, Object> result = new HashMap<>();
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance == null) {
throw new RuntimeException("workflow instance is null");
}
GanttDto ganttDto = new GanttDto();
DAG<String, TaskNode, TaskNodeRelation> dag = processInstance2DAG(processInstance);
//topological sort
List<String> nodeList = dag.topologicalSort();
ganttDto.setTaskNames(nodeList);
List<Task> taskList = new ArrayList<>();
for (String node : nodeList) {
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node);
if (taskInstance == null) {
continue;
}
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
Task task = new Task();
task.setTaskName(taskInstance.getName());
task.getStartDate().add(startTime.getTime());
task.getEndDate().add(endTime.getTime());
task.setIsoStart(startTime);
task.setIsoEnd(endTime);
task.setStatus(taskInstance.getState().toString());
task.setExecutionDate(taskInstance.getStartTime());
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime()));
taskList.add(task);
}
ganttDto.setTasks(taskList);
result.put(DATA_LIST, ganttDto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* process instance to DAG
*
* @param processInstance input process instance
* @return process instance dag.
*/
private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) {
String processDefinitionJson = processInstance.getProcessInstanceJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
return DagHelper.buildDagGraph(processDag);
}
/**
* query process instance by processDefinitionId and stateArray
* @param processDefinitionId processDefinitionId
* @param states states array
* @return process instance list
*/
public List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) {
return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states);
}
/**
* query process instance by processDefinitionId
* @param processDefinitionId processDefinitionId
* @param size size
* @return process instance list
*/
public List<ProcessInstance> queryByProcessDefineId(int processDefinitionId,int size) {
return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,101 | [BUG] After renaming the branch name, the name of Branch Flow does not change | After renaming a task, the name shown in the Branch Flow is still the task's previous name
![image](https://user-images.githubusercontent.com/55787491/76071071-9e2ff200-5fd0-11ea-8702-e7b357d7b060.png)
**Which version of Dolphin Scheduler:**
-[1.2.0_release]
| https://github.com/apache/dolphinscheduler/issues/2101 | https://github.com/apache/dolphinscheduler/pull/4555 | cfd9db5555b3d8f9f33f25a9272c8741115a9336 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | "2020-03-06T09:35:19Z" | java | "2021-02-02T08:07:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService;
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.CheckUtils;
import org.apache.dolphinscheduler.api.utils.FileUtils;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.exportprocess.ProcessAddTaskParam;
import org.apache.dolphinscheduler.api.utils.exportprocess.TaskNodeParamFactory;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StreamUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.TaskParametersUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinitionVersion;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.permission.PermissionCheck;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* process definition service impl
*/
@Service
public class ProcessDefinitionServiceImpl extends BaseService implements
ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
private static final String PROCESSDEFINITIONID = "processDefinitionId";
private static final String RELEASESTATE = "releaseState";
private static final String TASKS = "tasks";
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionVersionService processDefinitionVersionService;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceService processInstanceService;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProcessService processService;
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
*/
@Override
public Map<String, Object> createProcessDefinition(User loginUser,
String projectName,
String name,
String processDefinitionJson,
String desc,
String locations,
String connects) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefine = new ProcessDefinition();
Date now = new Date();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
Map<String, Object> checkProcessJson = checkProcessNodeList(processData, processDefinitionJson);
if (checkProcessJson.get(Constants.STATUS) != Status.SUCCESS) {
return checkProcessJson;
}
processDefine.setName(name);
processDefine.setReleaseState(ReleaseState.OFFLINE);
processDefine.setProjectId(project.getId());
processDefine.setUserId(loginUser.getId());
processDefine.setProcessDefinitionJson(processDefinitionJson);
processDefine.setDescription(desc);
processDefine.setLocations(locations);
processDefine.setConnects(connects);
processDefine.setTimeout(processData.getTimeout());
processDefine.setTenantId(processData.getTenantId());
processDefine.setModifyBy(loginUser.getUserName());
processDefine.setResourceIds(getResourceIds(processData));
//custom global params
List<Property> globalParamsList = processData.getGlobalParams();
if (CollectionUtils.isNotEmpty(globalParamsList)) {
Set<Property> globalParamsSet = new HashSet<>(globalParamsList);
globalParamsList = new ArrayList<>(globalParamsSet);
processDefine.setGlobalParamList(globalParamsList);
}
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
// save the new process definition
processDefineMapper.insert(processDefine);
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
processDefine.setVersion(version);
processDefineMapper.updateVersionByProcessDefinitionId(processDefine.getId(), version);
// return processDefinition object with ID
result.put(Constants.DATA_LIST, processDefine.getId());
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get resource ids
*
* @param processData process data
* @return resource ids
*/
private String getResourceIds(ProcessData processData) {
List<TaskNode> tasks = processData.getTasks();
Set<Integer> resourceIds = new HashSet<>();
StringBuilder sb = new StringBuilder();
if (CollectionUtils.isEmpty(tasks)) {
return sb.toString();
}
for (TaskNode taskNode : tasks) {
String taskParameter = taskNode.getParams();
AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskParameter);
if (params == null) {
continue;
}
if (CollectionUtils.isNotEmpty(params.getResourceFilesList())) {
Set<Integer> tempSet = params.getResourceFilesList().
stream()
.filter(t -> t.getId() != 0)
.map(ResourceInfo::getId)
.collect(Collectors.toSet());
resourceIds.addAll(tempSet);
}
}
for (int i : resourceIds) {
if (sb.length() > 0) {
sb.append(",");
}
sb.append(i);
}
return sb.toString();
}
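// Illustrative sketch (editor's note, not part of the original class): the manual
// StringBuilder join above can equivalently be written with the already-imported
// stream Collectors API; the output format is the same comma-separated id list.
private static String joinResourceIdsSketch(Set<Integer> resourceIds) {
return resourceIds.stream()
.map(String::valueOf)
.collect(Collectors.joining(","));
}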
/**
* query process definition list
*
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
@Override
public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
HashMap<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(project.getId());
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition list paging
*
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
@Override
public Map<String, Object> queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
Page<ProcessDefinition> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinition> processDefinitionIPage = processDefineMapper.queryDefineListPaging(
page, searchVal, userId, project.getId(), isAdmin(loginUser));
PageInfo<ProcessDefinition> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) processDefinitionIPage.getTotal());
pageInfo.setLists(processDefinitionIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query detail of process definition
*
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @return process definition detail
*/
@Override
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
} else {
result.put(Constants.DATA_LIST, processDefinition);
putMsg(result, Status.SUCCESS);
}
return result;
}
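/**
* query process definition by name
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionName process definition name
* @return process definition detail
*/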
@Override
public Map<String, Object> queryProcessDefinitionByName(User loginUser, String projectName, String processDefinitionName) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(project.getId(),processDefinitionName);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionName);
} else {
result.put(Constants.DATA_LIST, processDefinition);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
@Override
public Map<String, Object> updateProcessDefinition(User loginUser,
String projectName,
int id,
String name,
String processDefinitionJson,
String desc,
String locations,
String connects) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
Map<String, Object> checkProcessJson = checkProcessNodeList(processData, processDefinitionJson);
if ((checkProcessJson.get(Constants.STATUS) != Status.SUCCESS)) {
return checkProcessJson;
}
ProcessDefinition processDefine = processService.findProcessDefineById(id);
// check process definition exists
if (processDefine == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, id);
return result;
}
if (processDefine.getReleaseState() == ReleaseState.ONLINE) {
// online can not permit edit
putMsg(result, Status.PROCESS_DEFINE_NOT_ALLOWED_EDIT, processDefine.getName());
return result;
}
if (!name.equals(processDefine.getName())) {
// check whether the new process definition name already exists
ProcessDefinition definition = processDefineMapper.verifyByDefineName(project.getId(), name);
if (definition != null) {
putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, name);
return result;
}
}
Date now = new Date();
processDefine.setId(id);
processDefine.setName(name);
processDefine.setReleaseState(ReleaseState.OFFLINE);
processDefine.setProjectId(project.getId());
processDefine.setProcessDefinitionJson(processDefinitionJson);
processDefine.setDescription(desc);
processDefine.setLocations(locations);
processDefine.setConnects(connects);
processDefine.setTimeout(processData.getTimeout());
processDefine.setTenantId(processData.getTenantId());
processDefine.setModifyBy(loginUser.getUserName());
processDefine.setResourceIds(getResourceIds(processData));
//custom global params
List<Property> globalParamsList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(processData.getGlobalParams())) {
Set<Property> userDefParamsSet = new HashSet<>(processData.getGlobalParams());
globalParamsList = new ArrayList<>(userDefParamsSet);
}
processDefine.setGlobalParamList(globalParamsList);
processDefine.setUpdateTime(now);
processDefine.setFlag(Flag.YES);
// add process definition version
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefine);
processDefine.setVersion(version);
if (processDefineMapper.updateById(processDefine) > 0) {
putMsg(result, Status.SUCCESS);
result.put(Constants.DATA_LIST, processDefineMapper.queryByDefineId(id));
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
return result;
}
/**
* verify process definition name unique
*
* @param loginUser login user
* @param projectName project name
* @param name name
* @return SUCCESS if the process definition name does not exist yet, otherwise an error status
*/
@Override
public Map<String, Object> verifyProcessDefinitionName(User loginUser, String projectName, String name) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.verifyByDefineName(project.getId(), name);
if (processDefinition == null) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.VERIFY_PROCESS_DEFINITION_NAME_UNIQUE_ERROR, name);
}
return result;
}
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) {
Map<String, Object> result = new HashMap<>(5);
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(processDefinitionId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId);
return result;
}
// Determine if the login user is the owner of the process definition
if (loginUser.getId() != processDefinition.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
// check process definition is already online
if (processDefinition.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE, processDefinitionId);
return result;
}
// check process instances is already running
List<ProcessInstance> processInstances = processInstanceService.queryByProcessDefineIdAndStatus(processDefinitionId, Constants.NOT_TERMINATED_STATES);
if (CollectionUtils.isNotEmpty(processInstances)) {
putMsg(result, Status.DELETE_PROCESS_DEFINITION_BY_ID_FAIL, processInstances.size());
return result;
}
// get the schedules defined for the process definition
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
if (schedules.size() > 1) {
logger.warn("schedule number is {}, greater than 1", schedules.size());
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR);
return result;
} else if (schedules.size() == 1) {
Schedule schedule = schedules.get(0);
if (schedule.getReleaseState() == ReleaseState.OFFLINE) {
scheduleMapper.deleteById(schedule.getId());
} else if (schedule.getReleaseState() == ReleaseState.ONLINE) {
putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
return result;
}
}
int delete = processDefineMapper.deleteById(processDefinitionId);
if (delete > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR);
}
return result;
}
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param releaseState release state
* @return release result code
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> releaseProcessDefinition(User loginUser, String projectName, int id, ReleaseState releaseState) {
HashMap<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultEnum = (Status) checkResult.get(Constants.STATUS);
if (resultEnum != Status.SUCCESS) {
return checkResult;
}
// check state
if (null == releaseState) {
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
ProcessDefinition processDefinition = processDefineMapper.selectById(id);
switch (releaseState) {
case ONLINE:
// check whether the referenced resources have been deleted or had their authorization revoked
String resourceIds = processDefinition.getResourceIds();
if (StringUtils.isNotBlank(resourceIds)) {
Integer[] resourceIdArray = Arrays.stream(resourceIds.split(",")).map(Integer::parseInt).toArray(Integer[]::new);
PermissionCheck<Integer> permissionCheck = new PermissionCheck<>(AuthorizationType.RESOURCE_FILE_ID, processService, resourceIdArray, loginUser.getId(), logger);
try {
permissionCheck.checkPermission();
} catch (Exception e) {
logger.error(e.getMessage(), e);
putMsg(result, Status.RESOURCE_NOT_EXIST_OR_NO_PERMISSION, RELEASESTATE);
return result;
}
}
processDefinition.setReleaseState(releaseState);
processDefineMapper.updateById(processDefinition);
break;
case OFFLINE:
processDefinition.setReleaseState(releaseState);
processDefineMapper.updateById(processDefinition);
List<Schedule> scheduleList = scheduleMapper.selectAllByProcessDefineArray(
new int[]{processDefinition.getId()}
);
for (Schedule schedule : scheduleList) {
logger.info("set schedule offline, project id: {}, schedule id: {}, process definition id: {}", project.getId(), schedule.getId(), id);
// set status
schedule.setReleaseState(ReleaseState.OFFLINE);
scheduleMapper.updateById(schedule);
SchedulerService.deleteSchedule(project.getId(), schedule.getId());
}
break;
default:
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, RELEASESTATE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* batch export process definition by ids
*/
@Override
public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) {
if (StringUtils.isEmpty(processDefinitionIds)) {
return;
}
//export project info
Project project = projectMapper.queryByName(projectName);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return;
}
List<ProcessMeta> processDefinitionList =
getProcessDefinitionList(processDefinitionIds);
if (CollectionUtils.isNotEmpty(processDefinitionList)) {
downloadProcessDefinitionFile(response, processDefinitionList);
}
}
/**
* get process definition list by ids
*/
private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds) {
List<ProcessMeta> processDefinitionList = new ArrayList<>();
String[] processDefinitionIdArray = processDefinitionIds.split(",");
for (String strProcessDefinitionId : processDefinitionIdArray) {
//get workflow info
int processDefinitionId = Integer.parseInt(strProcessDefinitionId);
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (null != processDefinition) {
processDefinitionList.add(exportProcessMetaData(processDefinitionId, processDefinition));
}
}
return processDefinitionList;
}
/**
* download the process definition file
*/
private void downloadProcessDefinitionFile(HttpServletResponse response, List<ProcessMeta> processDefinitionList) {
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
BufferedOutputStream buff = null;
ServletOutputStream out = null;
try {
out = response.getOutputStream();
buff = new BufferedOutputStream(out);
buff.write(JSONUtils.toJsonString(processDefinitionList).getBytes(StandardCharsets.UTF_8));
buff.flush();
buff.close();
} catch (IOException e) {
logger.warn("export process fail", e);
} finally {
if (null != buff) {
try {
buff.close();
} catch (Exception e) {
logger.warn("export process buffer not close", e);
}
}
if (null != out) {
try {
out.close();
} catch (Exception e) {
logger.warn("export process output stream not close", e);
}
}
}
}
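// Illustrative sketch (editor's note): the manual close/finally handling above can
// be written with try-with-resources; the streams are then closed automatically,
// including on exception. Behavior is otherwise the same.
private void downloadProcessDefinitionFileSketch(HttpServletResponse response, List<ProcessMeta> processDefinitionList) {
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
try (ServletOutputStream out = response.getOutputStream();
BufferedOutputStream buff = new BufferedOutputStream(out)) {
buff.write(JSONUtils.toJsonString(processDefinitionList).getBytes(StandardCharsets.UTF_8));
buff.flush();
} catch (IOException e) {
logger.warn("export process fail", e);
}
}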
/**
* get export process metadata string
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata string
*/
public String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
//create workflow json file
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition));
}
/**
* get export process metadata
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata
*/
public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) {
//correct task param which has data source or dependent param
String correctProcessDefinitionJson = addExportTaskNodeSpecialParam(processDefinition.getProcessDefinitionJson());
processDefinition.setProcessDefinitionJson(correctProcessDefinitionJson);
//export process metadata
ProcessMeta exportProcessMeta = new ProcessMeta();
exportProcessMeta.setProjectName(processDefinition.getProjectName());
exportProcessMeta.setProcessDefinitionName(processDefinition.getName());
exportProcessMeta.setProcessDefinitionJson(processDefinition.getProcessDefinitionJson());
exportProcessMeta.setProcessDefinitionDescription(processDefinition.getDescription());
exportProcessMeta.setProcessDefinitionLocations(processDefinition.getLocations());
exportProcessMeta.setProcessDefinitionConnects(processDefinition.getConnects());
//schedule info
List<Schedule> schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId);
if (!schedules.isEmpty()) {
Schedule schedule = schedules.get(0);
exportProcessMeta.setScheduleWarningType(schedule.getWarningType().toString());
exportProcessMeta.setScheduleWarningGroupId(schedule.getWarningGroupId());
exportProcessMeta.setScheduleStartTime(DateUtils.dateToString(schedule.getStartTime()));
exportProcessMeta.setScheduleEndTime(DateUtils.dateToString(schedule.getEndTime()));
exportProcessMeta.setScheduleCrontab(schedule.getCrontab());
exportProcessMeta.setScheduleFailureStrategy(String.valueOf(schedule.getFailureStrategy()));
exportProcessMeta.setScheduleReleaseState(String.valueOf(ReleaseState.OFFLINE));
exportProcessMeta.setScheduleProcessInstancePriority(String.valueOf(schedule.getProcessInstancePriority()));
exportProcessMeta.setScheduleWorkerGroupName(schedule.getWorkerGroup());
}
//create workflow json file
return exportProcessMeta;
}
/**
* correct task params which have datasource or dependent params
*
* @param processDefinitionJson processDefinitionJson
* @return correct processDefinitionJson
*/
private String addExportTaskNodeSpecialParam(String processDefinitionJson) {
ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.path(TASKS);
for (int i = 0; i < jsonArray.size(); i++) {
JsonNode taskNode = jsonArray.path(i);
if (StringUtils.isNotEmpty(taskNode.path("type").asText())) {
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
if (null != addTaskParam) {
addTaskParam.addExportSpecialParam(taskNode);
}
}
}
jsonObject.set(TASKS, jsonArray);
return jsonObject.toString();
}
/**
* check task if has sub process
*
* @param taskType task type
* @return if task has sub process return true else false
*/
private boolean checkTaskHasSubProcess(String taskType) {
return taskType.equals(TaskType.SUB_PROCESS.name());
}
/**
* import process definition
*
* @param loginUser login user
* @param file process metadata json file
* @param currentProjectName current project name
* @return import process
*/
@Override
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
Map<String, Object> result = new HashMap<>(5);
String processMetaJson = FileUtils.file2String(file);
List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
//check file content
if (CollectionUtils.isEmpty(processMetaList)) {
putMsg(result, Status.DATA_IS_NULL, "fileContent");
return result;
}
for (ProcessMeta processMeta : processMetaList) {
if (!checkAndImportProcessDefinition(loginUser, currentProjectName, result, processMeta)) {
return result;
}
}
return result;
}
/**
* check and import process definition
*/
private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map<String, Object> result, ProcessMeta processMeta) {
if (!checkImportanceParams(processMeta, result)) {
return false;
}
//deal with process name
String processDefinitionName = processMeta.getProcessDefinitionName();
//use currentProjectName to query
Project targetProject = projectMapper.queryByName(currentProjectName);
if (null != targetProject) {
processDefinitionName = recursionProcessDefinitionName(targetProject.getId(),
processDefinitionName, 1);
}
//unique check
Map<String, Object> checkResult = verifyProcessDefinitionName(loginUser, currentProjectName, processDefinitionName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (Status.SUCCESS.equals(status)) {
putMsg(result, Status.SUCCESS);
} else {
result.putAll(checkResult);
return false;
}
// get create process result
Map<String, Object> createProcessResult =
getCreateProcessResult(loginUser,
currentProjectName,
result,
processMeta,
processDefinitionName,
addImportTaskNodeParam(loginUser, processMeta.getProcessDefinitionJson(), targetProject));
if (createProcessResult == null) {
return false;
}
//create process definition
Integer processDefinitionId =
Objects.isNull(createProcessResult.get(Constants.DATA_LIST))
? null : Integer.parseInt(createProcessResult.get(Constants.DATA_LIST).toString());
//scheduler param
return getImportProcessScheduleResult(loginUser,
currentProjectName,
result,
processMeta,
processDefinitionName,
processDefinitionId);
}
/**
* get create process result
*/
private Map<String, Object> getCreateProcessResult(User loginUser,
String currentProjectName,
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
String importProcessParam) {
Map<String, Object> createProcessResult = null;
try {
createProcessResult = createProcessDefinition(loginUser
, currentProjectName,
processDefinitionName + "_import_" + DateUtils.getCurrentTimeStamp(),
importProcessParam,
processMeta.getProcessDefinitionDescription(),
processMeta.getProcessDefinitionLocations(),
processMeta.getProcessDefinitionConnects());
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("import process meta json data: {}", e.getMessage(), e);
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
}
return createProcessResult;
}
/**
* get import process schedule result
*/
private boolean getImportProcessScheduleResult(User loginUser,
String currentProjectName,
Map<String, Object> result,
ProcessMeta processMeta,
String processDefinitionName,
Integer processDefinitionId) {
if (null != processMeta.getScheduleCrontab() && null != processDefinitionId) {
int scheduleInsert = importProcessSchedule(loginUser,
currentProjectName,
processMeta,
processDefinitionName,
processDefinitionId);
if (0 == scheduleInsert) {
putMsg(result, Status.IMPORT_PROCESS_DEFINE_ERROR);
return false;
}
}
return true;
}
/**
* check required params (projectName, processDefinitionName, processDefinitionJson)
*/
private boolean checkImportanceParams(ProcessMeta processMeta, Map<String, Object> result) {
if (StringUtils.isEmpty(processMeta.getProjectName())) {
putMsg(result, Status.DATA_IS_NULL, "projectName");
return false;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionName())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionName");
return false;
}
if (StringUtils.isEmpty(processMeta.getProcessDefinitionJson())) {
putMsg(result, Status.DATA_IS_NULL, "processDefinitionJson");
return false;
}
return true;
}
/**
* import process add special task param
*
* @param loginUser login user
* @param processDefinitionJson process definition json
* @param targetProject target project
* @return import process param
*/
private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) {
ObjectNode jsonObject = JSONUtils.parseObject(processDefinitionJson);
ArrayNode jsonArray = (ArrayNode) jsonObject.get(TASKS);
//add sql and dependent param
for (int i = 0; i < jsonArray.size(); i++) {
JsonNode taskNode = jsonArray.path(i);
String taskType = taskNode.path("type").asText();
ProcessAddTaskParam addTaskParam = TaskNodeParamFactory.getByTaskType(taskType);
if (null != addTaskParam) {
addTaskParam.addImportSpecialParam(taskNode);
}
}
//recursively correct sub-process parameters: map key is the old process id, map value is the new process id
Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, jsonArray, subProcessIdMap);
}
jsonObject.set(TASKS, jsonArray);
return jsonObject.toString();
}
/**
* import process schedule
*
* @param loginUser login user
* @param currentProjectName current project name
* @param processMeta process meta data
* @param processDefinitionName process definition name
* @param processDefinitionId process definition id
* @return insert schedule flag
*/
public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta,
String processDefinitionName, Integer processDefinitionId) {
Date now = new Date();
Schedule scheduleObj = new Schedule();
scheduleObj.setProjectName(currentProjectName);
scheduleObj.setProcessDefinitionId(processDefinitionId);
scheduleObj.setProcessDefinitionName(processDefinitionName);
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setCrontab(processMeta.getScheduleCrontab());
if (null != processMeta.getScheduleStartTime()) {
scheduleObj.setStartTime(DateUtils.stringToDate(processMeta.getScheduleStartTime()));
}
if (null != processMeta.getScheduleEndTime()) {
scheduleObj.setEndTime(DateUtils.stringToDate(processMeta.getScheduleEndTime()));
}
if (null != processMeta.getScheduleWarningType()) {
scheduleObj.setWarningType(WarningType.valueOf(processMeta.getScheduleWarningType()));
}
if (null != processMeta.getScheduleWarningGroupId()) {
scheduleObj.setWarningGroupId(processMeta.getScheduleWarningGroupId());
}
if (null != processMeta.getScheduleFailureStrategy()) {
scheduleObj.setFailureStrategy(FailureStrategy.valueOf(processMeta.getScheduleFailureStrategy()));
}
if (null != processMeta.getScheduleReleaseState()) {
scheduleObj.setReleaseState(ReleaseState.valueOf(processMeta.getScheduleReleaseState()));
}
if (null != processMeta.getScheduleProcessInstancePriority()) {
scheduleObj.setProcessInstancePriority(Priority.valueOf(processMeta.getScheduleProcessInstancePriority()));
}
if (null != processMeta.getScheduleWorkerGroupName()) {
scheduleObj.setWorkerGroup(processMeta.getScheduleWorkerGroupName());
}
return scheduleMapper.insert(scheduleObj);
}
/**
* check whether the imported process has sub processes
* recursively create the sub processes
*
* @param loginUser login user
* @param targetProject target project
* @param jsonArray process task array
* @param subProcessIdMap correct sub process id map
*/
private void importSubProcess(User loginUser, Project targetProject, ArrayNode jsonArray, Map<Integer, Integer> subProcessIdMap) {
for (int i = 0; i < jsonArray.size(); i++) {
ObjectNode taskNode = (ObjectNode) jsonArray.path(i);
String taskType = taskNode.path("type").asText();
if (!checkTaskHasSubProcess(taskType)) {
continue;
}
//get sub process info
ObjectNode subParams = (ObjectNode) taskNode.path("params");
Integer subProcessId = subParams.path(PROCESSDEFINITIONID).asInt();
ProcessDefinition subProcess = processDefineMapper.queryByDefineId(subProcessId);
//check whether the sub process exists in db
if (null == subProcess) {
continue;
}
String subProcessJson = subProcess.getProcessDefinitionJson();
//check whether the current project already has the sub process
ProcessDefinition currentProjectSubProcess = processDefineMapper.queryByDefineName(targetProject.getId(), subProcess.getName());
if (null == currentProjectSubProcess) {
ArrayNode subJsonArray = (ArrayNode) JSONUtils.parseObject(subProcess.getProcessDefinitionJson()).get(TASKS);
List<Object> subProcessList = StreamUtils.asStream(subJsonArray.elements())
.filter(item -> checkTaskHasSubProcess(JSONUtils.parseObject(item.toString()).path("type").asText()))
.collect(Collectors.toList());
if (CollectionUtils.isNotEmpty(subProcessList)) {
importSubProcess(loginUser, targetProject, subJsonArray, subProcessIdMap);
//correct the sub process ids
if (!subProcessIdMap.isEmpty()) {
for (Map.Entry<Integer, Integer> entry : subProcessIdMap.entrySet()) {
String oldSubProcessId = "\"processDefinitionId\":" + entry.getKey();
String newSubProcessId = "\"processDefinitionId\":" + entry.getValue();
subProcessJson = subProcessJson.replaceAll(oldSubProcessId, newSubProcessId);
}
subProcessIdMap.clear();
}
}
//after any recursive correction of nested sub-processes
Date now = new Date();
//create sub process in target project
ProcessDefinition processDefine = new ProcessDefinition();
processDefine.setName(subProcess.getName());
processDefine.setVersion(subProcess.getVersion());
processDefine.setReleaseState(subProcess.getReleaseState());
processDefine.setProjectId(targetProject.getId());
processDefine.setUserId(loginUser.getId());
processDefine.setProcessDefinitionJson(subProcessJson);
processDefine.setDescription(subProcess.getDescription());
processDefine.setLocations(subProcess.getLocations());
processDefine.setConnects(subProcess.getConnects());
processDefine.setTimeout(subProcess.getTimeout());
processDefine.setTenantId(subProcess.getTenantId());
processDefine.setGlobalParams(subProcess.getGlobalParams());
processDefine.setCreateTime(now);
processDefine.setUpdateTime(now);
processDefine.setFlag(subProcess.getFlag());
processDefine.setWarningGroupId(subProcess.getWarningGroupId());
processDefineMapper.insert(processDefine);
logger.info("create sub process, project: {}, process name: {}", targetProject.getName(), processDefine.getName());
//modify task node
ProcessDefinition newSubProcessDefine = processDefineMapper.queryByDefineName(processDefine.getProjectId(), processDefine.getName());
if (null != newSubProcessDefine) {
subProcessIdMap.put(subProcessId, newSubProcessDefine.getId());
subParams.put(PROCESSDEFINITIONID, newSubProcessDefine.getId());
taskNode.set("params", subParams);
}
}
}
}
/**
* check that the process definition nodes meet the specifications
*
* @param processData process data
* @param processDefinitionJson process definition json
* @return check result code
*/
@Override
public Map<String, Object> checkProcessNodeList(ProcessData processData, String processDefinitionJson) {
Map<String, Object> result = new HashMap<>();
try {
if (processData == null) {
logger.error("process data is null");
putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson);
return result;
}
// Check whether the task node is normal
List<TaskNode> taskNodes = processData.getTasks();
if (taskNodes == null) {
logger.error("process node info is empty");
putMsg(result, Status.DATA_IS_NULL, processDefinitionJson);
return result;
}
// check has cycle
if (graphHasCycle(taskNodes)) {
logger.error("process DAG has cycle");
putMsg(result, Status.PROCESS_NODE_HAS_CYCLE);
return result;
}
// check whether the process definition json is normal
for (TaskNode taskNode : taskNodes) {
if (!CheckUtils.checkTaskNodeParameters(taskNode.getParams(), taskNode.getType())) {
logger.error("task node {} parameter invalid", taskNode.getName());
putMsg(result, Status.PROCESS_NODE_S_PARAMETER_INVALID, taskNode.getName());
return result;
}
// check extra params
CheckUtils.checkOtherParams(taskNode.getExtras());
}
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, e.getMessage());
}
return result;
}
/**
* get task node details based on process definition id
*
* @param defineId define id
* @return task node list
*/
@Override
public Map<String, Object> getTaskNodeListByDefinitionId(Integer defineId) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineId);
return result;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
putMsg(result, Status.DATA_IS_NOT_VALID, processDefinitionJson);
return result;
}
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks();
result.put(Constants.DATA_LIST, taskNodeList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get task node details based on a list of process definition ids
*
* @param defineIdList define id list
* @return task node list
*/
@Override
public Map<String, Object> getTaskNodeListByDefinitionIdList(String defineIdList) {
Map<String, Object> result = new HashMap<>();
Map<Integer, List<TaskNode>> taskNodeMap = new HashMap<>();
String[] idList = defineIdList.split(",");
List<Integer> idIntList = new ArrayList<>();
for (String definitionId : idList) {
idIntList.add(Integer.parseInt(definitionId));
}
Integer[] idArray = idIntList.toArray(new Integer[0]);
List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.info("process definition not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, defineIdList);
return result;
}
for (ProcessDefinition processDefinition : processDefinitionList) {
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = (processData.getTasks() == null) ? new ArrayList<>() : processData.getTasks();
taskNodeMap.put(processDefinition.getId(), taskNodeList);
}
result.put(Constants.DATA_LIST, taskNodeMap);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query process definition all by project id
*
* @param projectId project id
* @return process definitions in the project
*/
@Override
public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
HashMap<String, Object> result = new HashMap<>(5);
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId);
result.put(Constants.DATA_LIST, resourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* Encapsulates the TreeView structure
*
* @param processId process definition id
* @param limit limit
* @return tree view json data
* @throws Exception exception
*/
@Override
public Map<String, Object> viewTree(Integer processId, Integer limit) throws Exception {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (null == processDefinition) {
logger.info("process define not exists");
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinition);
return result;
}
DAG<String, TaskNode, TaskNodeRelation> dag = genDagGraph(processDefinition);
/**
* nodes that are running
*/
Map<String, List<TreeViewDto>> runningNodeMap = new ConcurrentHashMap<>();
/**
* nodes that are waiting to run
*/
Map<String, List<TreeViewDto>> waitingRunningNodeMap = new ConcurrentHashMap<>();
/**
* List of process instances
*/
List<ProcessInstance> processInstanceList = processInstanceService.queryByProcessDefineId(processId, limit);
for (ProcessInstance processInstance : processInstanceList) {
processInstance.setDuration(DateUtils.differSec(processInstance.getStartTime(), processInstance.getEndTime()));
}
if (limit > processInstanceList.size()) {
limit = processInstanceList.size();
}
TreeViewDto parentTreeViewDto = new TreeViewDto();
parentTreeViewDto.setName("DAG");
parentTreeViewDto.setType("");
// Specify the process definition, because this TreeView is for a single process definition
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
Date endTime = processInstance.getEndTime() == null ? new Date() : processInstance.getEndTime();
parentTreeViewDto.getInstances().add(new Instance(processInstance.getId(), processInstance.getName(), "", processInstance.getState().toString()
, processInstance.getStartTime(), endTime, processInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - processInstance.getStartTime().getTime())));
}
List<TreeViewDto> parentTreeViewDtoList = new ArrayList<>();
parentTreeViewDtoList.add(parentTreeViewDto);
// Here the task instances are encapsulated
for (String startNode : dag.getBeginNode()) {
runningNodeMap.put(startNode, parentTreeViewDtoList);
}
while (Stopper.isRunning()) {
Set<String> postNodeList = null;
Iterator<Map.Entry<String, List<TreeViewDto>>> iter = runningNodeMap.entrySet().iterator();
while (iter.hasNext()) {
Map.Entry<String, List<TreeViewDto>> en = iter.next();
String nodeName = en.getKey();
parentTreeViewDtoList = en.getValue();
TreeViewDto treeViewDto = new TreeViewDto();
treeViewDto.setName(nodeName);
TaskNode taskNode = dag.getNode(nodeName);
treeViewDto.setType(taskNode.getType());
//set treeViewDto instances
for (int i = limit - 1; i >= 0; i--) {
ProcessInstance processInstance = processInstanceList.get(i);
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstance.getId(), nodeName);
if (taskInstance == null) {
treeViewDto.getInstances().add(new Instance(-1, "not running", "null"));
} else {
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime();
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime();
int subProcessId = 0;
/**
* if the task is a sub process, return the sub process definition id, otherwise sub id = 0
*/
if (taskInstance.getTaskType().equals(TaskType.SUB_PROCESS.name())) {
String taskJson = taskInstance.getTaskJson();
taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
subProcessId = Integer.parseInt(JSONUtils.parseObject(
taskNode.getParams()).path(CMD_PARAM_SUB_PROCESS_DEFINE_ID).asText());
}
treeViewDto.getInstances().add(new Instance(taskInstance.getId(), taskInstance.getName(), taskInstance.getTaskType(), taskInstance.getState().toString()
, taskInstance.getStartTime(), taskInstance.getEndTime(), taskInstance.getHost(), DateUtils.format2Readable(endTime.getTime() - startTime.getTime()), subProcessId));
}
}
for (TreeViewDto pTreeViewDto : parentTreeViewDtoList) {
pTreeViewDto.getChildren().add(treeViewDto);
}
postNodeList = dag.getSubsequentNodes(nodeName);
if (CollectionUtils.isNotEmpty(postNodeList)) {
for (String nextNodeName : postNodeList) {
List<TreeViewDto> treeViewDtoList = waitingRunningNodeMap.get(nextNodeName);
if (CollectionUtils.isEmpty(treeViewDtoList)) {
treeViewDtoList = new ArrayList<>();
}
treeViewDtoList.add(treeViewDto);
waitingRunningNodeMap.put(nextNodeName, treeViewDtoList);
}
}
runningNodeMap.remove(nodeName);
}
if (waitingRunningNodeMap.isEmpty()) {
break;
} else {
runningNodeMap.putAll(waitingRunningNodeMap);
waitingRunningNodeMap.clear();
}
}
result.put(Constants.DATA_LIST, parentTreeViewDto);
result.put(Constants.STATUS, Status.SUCCESS);
result.put(Constants.MSG, Status.SUCCESS.getMsg());
return result;
}
/**
* Generate the DAG Graph based on the process definition id
*
* @param processDefinition process definition
* @return dag graph
*/
private DAG<String, TaskNode, TaskNodeRelation> genDagGraph(ProcessDefinition processDefinition) {
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//check process data
if (null != processData) {
List<TaskNode> taskNodeList = processData.getTasks();
processDefinition.setGlobalParamList(processData.getGlobalParams());
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList);
// Generate concrete Dag to be executed
return DagHelper.buildDagGraph(processDag);
}
return new DAG<>();
}
/**
* whether the graph has a cycle
*
* @param taskNodeResponseList task node response list
* @return if graph has cycle flag
*/
private boolean graphHasCycle(List<TaskNode> taskNodeResponseList) {
DAG<String, TaskNode, String> graph = new DAG<>();
// Fill the vertices
for (TaskNode taskNodeResponse : taskNodeResponseList) {
graph.addNode(taskNodeResponse.getName(), taskNodeResponse);
}
// Fill edge relations
for (TaskNode taskNodeResponse : taskNodeResponseList) {
List<String> preTasks = JSONUtils.toList(taskNodeResponse.getPreTasks(), String.class);
if (CollectionUtils.isNotEmpty(preTasks)) {
for (String preTask : preTasks) {
if (!graph.addEdge(preTask, taskNodeResponse.getName())) {
return true;
}
}
}
}
return graph.hasCycle();
}
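// Illustrative sketch (editor's note): how the DAG API used above reports a ring.
// Per the check in graphHasCycle, addEdge is assumed to return false when the new
// edge would close a cycle; the String node-value type is chosen here only for brevity.
private static boolean dagHasCycleSketch() {
DAG<String, String, String> graph = new DAG<>();
graph.addNode("A", "A");
graph.addNode("B", "B");
graph.addEdge("A", "B");
// the reverse edge would create A -> B -> A, so it is rejected
return !graph.addEdge("B", "A") || graph.hasCycle();
}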
private String recursionProcessDefinitionName(Integer projectId, String processDefinitionName, int num) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineName(projectId, processDefinitionName);
if (processDefinition != null) {
if (num > 1) {
String str = processDefinitionName.substring(0, processDefinitionName.length() - 3);
processDefinitionName = str + "(" + num + ")";
} else {
processDefinitionName = processDefinition.getName() + "(" + num + ")";
}
} else {
return processDefinitionName;
}
return recursionProcessDefinitionName(projectId, processDefinitionName, num + 1);
}
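// Worked example (editor's note): with an existing definition named "flow",
// recursionProcessDefinitionName(projectId, "flow", 1) probes "flow(1)", then
// "flow(2)", ... until an unused name is found. Note that the substring(0,
// length - 3) step assumes the "(n)" suffix is exactly three characters, i.e.
// it only strips single-digit suffixes cleanly.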
private Map<String, Object> copyProcessDefinition(User loginUser,
Integer processId,
Project targetProject) throws JsonProcessingException {
Map<String, Object> result = new HashMap<>(5);
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
return result;
} else {
return createProcessDefinition(
loginUser,
targetProject.getName(),
processDefinition.getName() + "_copy_" + DateUtils.getCurrentTimeStamp(),
processDefinition.getProcessDefinitionJson(),
processDefinition.getDescription(),
processDefinition.getLocations(),
processDefinition.getConnects());
}
}
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
*/
@Override
public Map<String, Object> batchCopyProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId) {
Map<String, Object> result = new HashMap<>();
List<String> failedProcessList = new ArrayList<>();
if (StringUtils.isEmpty(processDefinitionIds)) {
putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds);
return result;
}
//check src project auth
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, projectName);
if (checkResult != null) {
return checkResult;
}
Project targetProject = projectMapper.queryDetailById(targetProjectId);
if (targetProject == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId);
return result;
}
if (!(targetProject.getName()).equals(projectName)) {
Map<String, Object> checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName());
if (checkTargetProjectResult != null) {
return checkTargetProjectResult;
}
}
String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA);
doBatchCopyProcessDefinition(loginUser, targetProject, failedProcessList, processDefinitionIdList);
checkBatchOperateResult(projectName, targetProject.getName(), result, failedProcessList, true);
return result;
}
/**
* batch move process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
*/
@Override
public Map<String, Object> batchMoveProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId) {
Map<String, Object> result = new HashMap<>();
List<String> failedProcessList = new ArrayList<>();
//check src project auth
Map<String, Object> checkResult = checkProjectAndAuth(loginUser, projectName);
if (checkResult != null) {
return checkResult;
}
if (StringUtils.isEmpty(processDefinitionIds)) {
putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds);
return result;
}
Project targetProject = projectMapper.queryDetailById(targetProjectId);
if (targetProject == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId);
return result;
}
if (!(targetProject.getName()).equals(projectName)) {
Map<String, Object> checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName());
if (checkTargetProjectResult != null) {
return checkTargetProjectResult;
}
}
String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA);
doBatchMoveProcessDefinition(targetProject, failedProcessList, processDefinitionIdList);
checkBatchOperateResult(projectName, targetProject.getName(), result, failedProcessList, false);
return result;
}
/**
* switch the process definition to the specified version
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param version the version user want to switch
* @return switch process definition version result code
*/
@Override
public Map<String, Object> switchProcessDefinitionVersion(User loginUser, String projectName
, int processDefinitionId, long version) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId);
if (Objects.isNull(processDefinition)) {
putMsg(result
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_ERROR
, processDefinitionId);
return result;
}
ProcessDefinitionVersion processDefinitionVersion = processDefinitionVersionService
.queryByProcessDefinitionIdAndVersion(processDefinitionId, version);
if (Objects.isNull(processDefinitionVersion)) {
putMsg(result
, Status.SWITCH_PROCESS_DEFINITION_VERSION_NOT_EXIST_PROCESS_DEFINITION_VERSION_ERROR
, processDefinitionId
, version);
return result;
}
processDefinition.setVersion(processDefinitionVersion.getVersion());
processDefinition.setProcessDefinitionJson(processDefinitionVersion.getProcessDefinitionJson());
processDefinition.setDescription(processDefinitionVersion.getDescription());
processDefinition.setLocations(processDefinitionVersion.getLocations());
processDefinition.setConnects(processDefinitionVersion.getConnects());
processDefinition.setTimeout(processDefinitionVersion.getTimeout());
processDefinition.setGlobalParams(processDefinitionVersion.getGlobalParams());
processDefinition.setUpdateTime(new Date());
processDefinition.setWarningGroupId(processDefinitionVersion.getWarningGroupId());
processDefinition.setResourceIds(processDefinitionVersion.getResourceIds());
if (processDefineMapper.updateById(processDefinition) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.SWITCH_PROCESS_DEFINITION_VERSION_ERROR);
}
return result;
}
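    // A minimal usage sketch (not part of this class; the ids below are hypothetical):
    //   Map<String, Object> result = switchProcessDefinitionVersion(
    //           loginUser, "my-project", /* processDefinitionId */ 7, /* version */ 2L);
    // On success the live definition row carries version 2's json, locations,
    // connects, timeout and global params, copied field by field as above.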
/**
* do batch move process definition
*
* @param targetProject targetProject
* @param failedProcessList failedProcessList
* @param processDefinitionIdList processDefinitionIdList
*/
private void doBatchMoveProcessDefinition(Project targetProject, List<String> failedProcessList, String[] processDefinitionIdList) {
for (String processDefinitionId : processDefinitionIdList) {
try {
Map<String, Object> moveProcessDefinitionResult =
moveProcessDefinition(Integer.valueOf(processDefinitionId), targetProject);
if (!Status.SUCCESS.equals(moveProcessDefinitionResult.get(Constants.STATUS))) {
setFailedProcessList(failedProcessList, processDefinitionId);
logger.error((String) moveProcessDefinitionResult.get(Constants.MSG));
}
} catch (Exception e) {
setFailedProcessList(failedProcessList, processDefinitionId);
}
}
}
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param targetProject targetProject
* @param failedProcessList failedProcessList
* @param processDefinitionIdList processDefinitionIdList
*/
private void doBatchCopyProcessDefinition(User loginUser, Project targetProject, List<String> failedProcessList, String[] processDefinitionIdList) {
for (String processDefinitionId : processDefinitionIdList) {
try {
Map<String, Object> copyProcessDefinitionResult =
copyProcessDefinition(loginUser, Integer.valueOf(processDefinitionId), targetProject);
if (!Status.SUCCESS.equals(copyProcessDefinitionResult.get(Constants.STATUS))) {
setFailedProcessList(failedProcessList, processDefinitionId);
logger.error((String) copyProcessDefinitionResult.get(Constants.MSG));
}
} catch (Exception e) {
setFailedProcessList(failedProcessList, processDefinitionId);
}
}
}
/**
* set failed processList
*
* @param failedProcessList failedProcessList
* @param processDefinitionId processDefinitionId
*/
private void setFailedProcessList(List<String> failedProcessList, String processDefinitionId) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(Integer.valueOf(processDefinitionId));
if (processDefinition != null) {
failedProcessList.add(processDefinitionId + "[" + processDefinition.getName() + "]");
} else {
failedProcessList.add(processDefinitionId + "[null]");
}
}
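    // Example of the recorded entry format (id and name are hypothetical):
    // a failed definition with id 7 named "etl-daily" is recorded as
    // "7[etl-daily]"; if the definition row cannot be found, as "7[null]".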
/**
* check project and auth
*
* @param loginUser loginUser
* @param projectName projectName
*/
private Map<String, Object> checkProjectAndAuth(User loginUser, String projectName) {
Project project = projectMapper.queryByName(projectName);
//check user access for project
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status resultStatus = (Status) checkResult.get(Constants.STATUS);
if (resultStatus != Status.SUCCESS) {
return checkResult;
}
return null;
}
/**
* move process definition
*
* @param processId processId
* @param targetProject targetProject
* @return move result code
*/
private Map<String, Object> moveProcessDefinition(Integer processId,
Project targetProject) {
Map<String, Object> result = new HashMap<>();
ProcessDefinition processDefinition = processDefineMapper.selectById(processId);
if (processDefinition == null) {
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processId);
return result;
}
processDefinition.setProjectId(targetProject.getId());
processDefinition.setUpdateTime(new Date());
if (processDefineMapper.updateById(processDefinition) > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.UPDATE_PROCESS_DEFINITION_ERROR);
}
return result;
}
/**
* check batch operate result
*
* @param srcProjectName srcProjectName
* @param targetProjectName targetProjectName
* @param result result
* @param failedProcessList failedProcessList
* @param isCopy isCopy
*/
private void checkBatchOperateResult(String srcProjectName, String targetProjectName,
Map<String, Object> result, List<String> failedProcessList, boolean isCopy) {
if (!failedProcessList.isEmpty()) {
if (isCopy) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName, String.join(",", failedProcessList));
} else {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName, String.join(",", failedProcessList));
}
} else {
putMsg(result, Status.SUCCESS);
}
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,101 | [BUG] After renaming the branch name, the name of Branch Flow does not change | After renaming a task, the branch flow still shows the task name from before the change
![image](https://user-images.githubusercontent.com/55787491/76071071-9e2ff200-5fd0-11ea-8702-e7b357d7b060.png)
**Which version of Dolphin Scheduler:**
-[1.2.0_release]
| https://github.com/apache/dolphinscheduler/issues/2101 | https://github.com/apache/dolphinscheduler/pull/4555 | cfd9db5555b3d8f9f33f25a9272c8741115a9336 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | "2020-03-06T09:35:19Z" | java | "2021-02-02T08:07:37Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/task/conditions/ConditionsParameters.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.task.conditions;
import org.apache.dolphinscheduler.common.enums.DependentRelation;
import org.apache.dolphinscheduler.common.model.DependentTaskModel;
import org.apache.dolphinscheduler.common.process.ResourceInfo;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import java.util.ArrayList;
import java.util.List;
public class ConditionsParameters extends AbstractParameters {
    // depend node list and state; only the task name is needed
private List<DependentTaskModel> dependTaskList;
private DependentRelation dependRelation;
// node list to run when success
private List<String> successNode;
// node list to run when failed
private List<String> failedNode;
@Override
public boolean checkParameters() {
return true;
}
@Override
public List<ResourceInfo> getResourceFilesList() {
return new ArrayList<>();
}
public List<DependentTaskModel> getDependTaskList() {
return dependTaskList;
}
public void setDependTaskList(List<DependentTaskModel> dependTaskList) {
this.dependTaskList = dependTaskList;
}
public DependentRelation getDependRelation() {
return dependRelation;
}
public void setDependRelation(DependentRelation dependRelation) {
this.dependRelation = dependRelation;
}
public List<String> getSuccessNode() {
return successNode;
}
public void setSuccessNode(List<String> successNode) {
this.successNode = successNode;
}
public List<String> getFailedNode() {
return failedNode;
}
public void setFailedNode(List<String> failedNode) {
this.failedNode = failedNode;
}
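    // A minimal sketch of the task-params json this class is deserialized from
    // (task names and the inner dependItemList content are hypothetical):
    //   {
    //     "dependTaskList": [{"relation": "AND", "dependItemList": [ ... ]}],
    //     "dependRelation": "AND",
    //     "successNode": ["task-on-success"],
    //     "failedNode": ["task-on-failure"]
    //   }
    // successNode/failedNode store task *names*, which is why renaming a task
    // without rewriting these lists leaves a stale branch name.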
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,101 | [BUG] After renaming the branch name, the name of Branch Flow does not change | 更改任务名称后,分支流转的名称还是更改前的任务名称
![image](https://user-images.githubusercontent.com/55787491/76071071-9e2ff200-5fd0-11ea-8702-e7b357d7b060.png)
**Which version of Dolphin Scheduler:**
-[1.2.0_release]
| https://github.com/apache/dolphinscheduler/issues/2101 | https://github.com/apache/dolphinscheduler/pull/4555 | cfd9db5555b3d8f9f33f25a9272c8741115a9336 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | "2020-03-06T09:35:19Z" | java | "2021-02-02T08:07:37Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.process;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID;
import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS;
import static java.util.stream.Collectors.toSet;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.CycleDependency;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ErrorCommand;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import com.cronutils.model.Cron;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * process related dao that wraps the mappers used by process operations.
*/
@Component
public class ProcessService {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private ProcessInstanceMapMapper processInstanceMapMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectMapper projectMapper;
/**
* handle Command (construct ProcessInstance from Command) , wrapped in transaction
*
* @param logger logger
* @param host host
* @param validThreadNum validThreadNum
* @param command found command
* @return process instance
*/
@Transactional(rollbackFor = Exception.class)
public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) {
ProcessInstance processInstance = constructProcessInstance(command, host);
// cannot construct process instance, return null
if (processInstance == null) {
logger.error("scan command, command parameter is error: {}", command);
moveToErrorCommand(command, "process instance is null");
return null;
}
if (!checkThreadNum(command, validThreadNum)) {
logger.info("there is not enough thread for this command: {}", command);
return setWaitingThreadProcess(command, processInstance);
}
processInstance.setCommandType(command.getCommandType());
processInstance.addHistoryCmd(command.getCommandType());
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
delCommandById(command.getId());
return processInstance;
}
/**
* save error command, and delete original command
*
* @param command command
* @param message message
*/
@Transactional(rollbackFor = Exception.class)
public void moveToErrorCommand(Command command, String message) {
ErrorCommand errorCommand = new ErrorCommand(command, message);
this.errorCommandMapper.insert(errorCommand);
delCommandById(command.getId());
}
/**
* set process waiting thread
*
* @param command command
* @param processInstance processInstance
* @return process instance
*/
private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) {
processInstance.setState(ExecutionStatus.WAITTING_THREAD);
if (command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD) {
processInstance.addHistoryCmd(command.getCommandType());
}
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
createRecoveryWaitingThreadCommand(command, processInstance);
return null;
}
/**
* check thread num
*
* @param command command
* @param validThreadNum validThreadNum
* @return if thread is enough
*/
private boolean checkThreadNum(Command command, int validThreadNum) {
int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
return validThreadNum >= commandThreadCount;
}
/**
* insert one command
*
* @param command command
* @return create result
*/
public int createCommand(Command command) {
int result = 0;
if (command != null) {
result = commandMapper.insert(command);
}
return result;
}
/**
* find one command from queue list
*
* @return command
*/
public Command findOneCommand() {
return commandMapper.getOneToRun();
}
/**
* check the input command exists in queue list
*
* @param command command
* @return create command result
*/
public Boolean verifyIsNeedCreateCommand(Command command) {
Boolean isNeedCreate = true;
EnumMap<CommandType, Integer> cmdTypeMap = new EnumMap<>(CommandType.class);
cmdTypeMap.put(CommandType.REPEAT_RUNNING, 1);
cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, 1);
cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS, 1);
CommandType commandType = command.getCommandType();
if (cmdTypeMap.containsKey(commandType)) {
ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam());
int processInstanceId = cmdParamObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt();
List<Command> commands = commandMapper.selectList(null);
// for all commands
for (Command tmpCommand : commands) {
if (cmdTypeMap.containsKey(tmpCommand.getCommandType())) {
ObjectNode tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam());
if (tempObj != null && processInstanceId == tempObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt()) {
isNeedCreate = false;
break;
}
}
}
}
return isNeedCreate;
}
/**
* find process instance detail by id
*
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceDetailById(int processId) {
return processInstanceMapper.queryDetailById(processId);
}
/**
* get task node list by definitionId
*/
public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId) {
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
return null;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
return new ArrayList<>();
}
return processData.getTasks();
}
/**
* find process instance by id
*
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceById(int processId) {
return processInstanceMapper.selectById(processId);
}
/**
* find process define by id.
*
* @param processDefinitionId processDefinitionId
* @return process definition
*/
public ProcessDefinition findProcessDefineById(int processDefinitionId) {
return processDefineMapper.selectById(processDefinitionId);
}
/**
* delete work process instance by id
*
* @param processInstanceId processInstanceId
* @return delete process instance result
*/
public int deleteWorkProcessInstanceById(int processInstanceId) {
return processInstanceMapper.deleteById(processInstanceId);
}
/**
* delete all sub process by parent instance id
*
* @param processInstanceId processInstanceId
* @return delete all sub process instance result
*/
public int deleteAllSubWorkProcessByParentId(int processInstanceId) {
List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId);
for (Integer subId : subProcessIdList) {
deleteAllSubWorkProcessByParentId(subId);
deleteWorkProcessMapByParentId(subId);
removeTaskLogFile(subId);
deleteWorkProcessInstanceById(subId);
}
return 1;
}
/**
* remove task log file
*
* @param processInstanceId processInstanceId
*/
public void removeTaskLogFile(Integer processInstanceId) {
LogClientService logClient = null;
try {
logClient = new LogClientService();
List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
if (CollectionUtils.isEmpty(taskInstanceList)) {
return;
}
for (TaskInstance taskInstance : taskInstanceList) {
String taskLogPath = taskInstance.getLogPath();
if (StringUtils.isEmpty(taskInstance.getHost())) {
continue;
}
int port = Constants.RPC_PORT;
String ip = "";
try {
ip = Host.of(taskInstance.getHost()).getIp();
} catch (Exception e) {
// compatible old version
ip = taskInstance.getHost();
}
// remove task log from loggerserver
logClient.removeTaskLog(ip, port, taskLogPath);
}
} finally {
if (logClient != null) {
logClient.close();
}
}
}
/**
* calculate sub process number in the process define.
*
* @param processDefinitionId processDefinitionId
* @return process thread num count
*/
private Integer workProcessThreadNumCount(Integer processDefinitionId) {
List<Integer> ids = new ArrayList<>();
recurseFindSubProcessId(processDefinitionId, ids);
return ids.size() + 1;
}
/**
* recursive query sub process definition id by parent id.
*
* @param parentId parentId
* @param ids ids
*/
public void recurseFindSubProcessId(int parentId, List<Integer> ids) {
ProcessDefinition processDefinition = processDefineMapper.selectById(parentId);
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
if (taskNodeList != null && !taskNodeList.isEmpty()) {
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
ObjectNode parameterJson = JSONUtils.parseObject(parameter);
if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) {
SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids);
}
}
}
}
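    // Worked example (hypothetical ids): if definition 10 contains two SUB_PROCESS
    // nodes pointing at definitions 11 and 12, and neither nests further, then
    // recurseFindSubProcessId(10, ids) leaves ids = [11, 12], so
    // workProcessThreadNumCount(10) returns ids.size() + 1 = 3:
    // one thread for the parent plus one per (recursive) sub process.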
/**
* create recovery waiting thread command when thread pool is not enough for the process instance.
* sub work process instance need not to create recovery command.
* create recovery waiting thread command and delete origin command at the same time.
* if the recovery command is exists, only update the field update_time
*
* @param originCommand originCommand
* @param processInstance processInstance
*/
public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) {
        // a sub process does not need to create a wait command
if (processInstance.getIsSubProcess() == Flag.YES) {
if (originCommand != null) {
commandMapper.deleteById(originCommand.getId());
}
return;
}
Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, String.valueOf(processInstance.getId()));
// process instance quit by "waiting thread" state
if (originCommand == null) {
Command command = new Command(
CommandType.RECOVER_WAITTING_THREAD,
processInstance.getTaskDependType(),
processInstance.getFailureStrategy(),
processInstance.getExecutorId(),
processInstance.getProcessDefinitionId(),
JSONUtils.toJsonString(cmdParam),
processInstance.getWarningType(),
processInstance.getWarningGroupId(),
processInstance.getScheduleTime(),
processInstance.getWorkerGroup(),
processInstance.getProcessInstancePriority()
);
saveCommand(command);
return;
}
        // update the command time if the current command is recovered from the waiting thread state
if (originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD) {
originCommand.setUpdateTime(new Date());
saveCommand(originCommand);
} else {
// delete old command and create new waiting thread command
commandMapper.deleteById(originCommand.getId());
originCommand.setId(0);
originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
originCommand.setUpdateTime(new Date());
originCommand.setCommandParam(JSONUtils.toJsonString(cmdParam));
originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority());
saveCommand(originCommand);
}
}
/**
* get schedule time from command
*
* @param command command
* @param cmdParam cmdParam map
* @return date
*/
private Date getScheduleTime(Command command, Map<String, String> cmdParam) {
Date scheduleTime = command.getScheduleTime();
if (scheduleTime == null && cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)) {
scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
}
return scheduleTime;
}
/**
* generate a new work process instance from command.
*
* @param processDefinition processDefinition
* @param command command
* @param cmdParam cmdParam map
* @return process instance
*/
private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition,
Command command,
Map<String, String> cmdParam) {
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
processInstance.setRecovery(Flag.NO);
processInstance.setStartTime(new Date());
processInstance.setRunTimes(1);
processInstance.setMaxTryTimes(0);
processInstance.setProcessDefinitionId(command.getProcessDefinitionId());
processInstance.setCommandParam(command.getCommandParam());
processInstance.setCommandType(command.getCommandType());
processInstance.setIsSubProcess(Flag.NO);
processInstance.setTaskDependType(command.getTaskDependType());
processInstance.setFailureStrategy(command.getFailureStrategy());
processInstance.setExecutorId(command.getExecutorId());
WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType();
processInstance.setWarningType(warningType);
Integer warningGroupId = command.getWarningGroupId() == null ? 0 : command.getWarningGroupId();
processInstance.setWarningGroupId(warningGroupId);
// schedule time
Date scheduleTime = getScheduleTime(command, cmdParam);
if (scheduleTime != null) {
processInstance.setScheduleTime(scheduleTime);
}
processInstance.setCommandStartTime(command.getStartTime());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setConnects(processDefinition.getConnects());
// get start params from command param
Map<String, String> startParamMap = null;
if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_START_PARAMS)) {
String startParamJson = cmdParam.get(Constants.CMD_PARAM_START_PARAMS);
startParamMap = JSONUtils.toMap(startParamJson);
}
// set start param into global params
if (startParamMap != null && startParamMap.size() > 0
&& processDefinition.getGlobalParamMap() != null) {
for (Map.Entry<String, String> param : processDefinition.getGlobalParamMap().entrySet()) {
String val = startParamMap.get(param.getKey());
if (val != null) {
param.setValue(val);
}
}
}
// curing global params
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
//copy process define json to process instance
processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson());
// set process instance priority
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup();
processInstance.setWorkerGroup(workerGroup);
processInstance.setTimeout(processDefinition.getTimeout());
processInstance.setTenantId(processDefinition.getTenantId());
return processInstance;
}
/**
* get process tenant
     * if there is a tenant id in the definition, use the tenant of the definition.
     * if there is no tenant id in the definition, or the tenant does not exist,
     * use the definition creator's tenant.
*
* @param tenantId tenantId
* @param userId userId
* @return tenant
*/
public Tenant getTenantForProcess(int tenantId, int userId) {
Tenant tenant = null;
if (tenantId >= 0) {
tenant = tenantMapper.queryById(tenantId);
}
if (userId == 0) {
return null;
}
if (tenant == null) {
User user = userMapper.selectById(userId);
tenant = tenantMapper.queryById(user.getTenantId());
}
return tenant;
}
/**
* check command parameters is valid
*
* @param command command
* @param cmdParam cmdParam map
* @return whether command param is valid
*/
private Boolean checkCmdParam(Command command, Map<String, String> cmdParam) {
if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) {
if (cmdParam == null
|| !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES)
|| cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) {
logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType());
return false;
}
}
return true;
}
/**
* construct process instance according to one command.
*
* @param command command
* @param host host
* @return process instance
*/
private ProcessInstance constructProcessInstance(Command command, String host) {
ProcessInstance processInstance = null;
CommandType commandType = command.getCommandType();
Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam());
ProcessDefinition processDefinition = null;
if (command.getProcessDefinitionId() != 0) {
processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId());
if (processDefinition == null) {
logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId());
return null;
}
}
if (cmdParam != null) {
Integer processInstanceId = 0;
// recover from failure or pause tasks
if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) {
String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING);
processInstanceId = Integer.parseInt(processId);
if (processInstanceId == 0) {
logger.error("command parameter is error, [ ProcessInstanceId ] is 0");
return null;
}
} else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) {
// sub process map
String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS);
processInstanceId = Integer.parseInt(pId);
} else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) {
// waiting thread command
String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD);
processInstanceId = Integer.parseInt(pId);
}
if (processInstanceId == 0) {
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
} else {
processInstance = this.findProcessInstanceDetailById(processInstanceId);
// Recalculate global parameters after rerun.
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
}
processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId());
processInstance.setProcessDefinition(processDefinition);
//reset command parameter
if (processInstance.getCommandParam() != null) {
Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam());
for (Map.Entry<String, String> entry : processCmdParam.entrySet()) {
if (!cmdParam.containsKey(entry.getKey())) {
cmdParam.put(entry.getKey(), entry.getValue());
}
}
}
// reset command parameter if sub process
if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) {
processInstance.setCommandParam(command.getCommandParam());
}
} else {
// generate one new process instance
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}
if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) {
logger.error("command parameter check failed!");
return null;
}
if (command.getScheduleTime() != null) {
processInstance.setScheduleTime(command.getScheduleTime());
}
processInstance.setHost(host);
ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXECUTION;
int runTime = processInstance.getRunTimes();
switch (commandType) {
case START_PROCESS:
break;
case START_FAILURE_TASK_PROCESS:
// find failed tasks and init these tasks
List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE);
List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE);
List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL);
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
failedList.addAll(killedList);
failedList.addAll(toleranceList);
for (Integer taskId : failedList) {
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING,
String.join(Constants.COMMA, convertIntListToString(failedList)));
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
processInstance.setRunTimes(runTime + 1);
break;
case START_CURRENT_TASK_PROCESS:
break;
case RECOVER_WAITTING_THREAD:
break;
case RECOVER_SUSPENDED_PROCESS:
// find pause tasks and init task's state
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE);
List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(),
ExecutionStatus.KILL);
suspendedNodeList.addAll(stopNodeList);
for (Integer taskId : suspendedNodeList) {
// initialize the pause state
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList)));
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
processInstance.setRunTimes(runTime + 1);
break;
case RECOVER_TOLERANCE_FAULT_PROCESS:
// recover tolerance fault process
processInstance.setRecovery(Flag.YES);
runStatus = processInstance.getState();
break;
case COMPLEMENT_DATA:
// delete all the valid tasks when complement data
List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId());
for (TaskInstance taskInstance : taskInstanceList) {
taskInstance.setFlag(Flag.NO);
this.updateTaskInstance(taskInstance);
}
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case REPEAT_RUNNING:
// delete the recover task names from command parameter
if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) {
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
}
// delete all the valid tasks when repeat running
List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId());
for (TaskInstance taskInstance : validTaskList) {
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
}
processInstance.setStartTime(new Date());
processInstance.setEndTime(null);
processInstance.setRunTimes(runTime + 1);
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case SCHEDULER:
break;
default:
break;
}
processInstance.setState(runStatus);
return processInstance;
}
/**
* return complement data if the process start with complement data
*
* @param processInstance processInstance
* @param command command
* @return command type
*/
private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) {
if (CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()) {
return CommandType.COMPLEMENT_DATA;
} else {
return command.getCommandType();
}
}
/**
* initialize complement data parameters
*
* @param processDefinition processDefinition
* @param processInstance processInstance
* @param cmdParam cmdParam
*/
private void initComplementDataParam(ProcessDefinition processDefinition,
ProcessInstance processInstance,
Map<String, String> cmdParam) {
if (!processInstance.isComplementData()) {
return;
}
Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE),
YYYY_MM_DD_HH_MM_SS);
if (Flag.NO == processInstance.getIsSubProcess()) {
processInstance.setScheduleTime(startComplementTime);
}
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
}
/**
* set sub work process parameters.
* handle sub work process instance, update relation table and command parameters
* set sub work process flag, extends parent work process command parameters
*
* @param subProcessInstance subProcessInstance
* @return process instance
*/
public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance) {
String cmdParam = subProcessInstance.getCommandParam();
if (StringUtils.isEmpty(cmdParam)) {
return subProcessInstance;
}
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
// write sub process id into cmd param.
if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS)
&& CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) {
paramMap.remove(CMD_PARAM_SUB_PROCESS);
paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId()));
subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap));
subProcessInstance.setIsSubProcess(Flag.YES);
this.saveProcessInstance(subProcessInstance);
}
// copy parent instance user def params to sub process..
String parentInstanceId = paramMap.get(CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID);
if (StringUtils.isNotEmpty(parentInstanceId)) {
ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId));
if (parentInstance != null) {
subProcessInstance.setGlobalParams(
joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams()));
this.saveProcessInstance(subProcessInstance);
} else {
logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam);
}
}
ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class);
if (processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0) {
return subProcessInstance;
}
// update sub process id to process map table
processInstanceMap.setProcessInstanceId(subProcessInstance.getId());
this.updateWorkProcessInstanceMap(processInstanceMap);
return subProcessInstance;
}
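    // A minimal sketch of the rewrite above, using the constant names as keys
    // and hypothetical ids:
    //   before: {CMD_PARAM_SUB_PROCESS: CMD_PARAM_EMPTY_SUB_PROCESS,
    //            CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID: "88"}
    //   after:  {CMD_PARAM_SUB_PROCESS: "123"}   // 123 = subProcessInstance.getId()
    // and the sub instance is flagged Flag.YES and inherits every parent global
    // param it does not already define (see joinGlobalParams below).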
/**
* join parent global params into sub process.
* only the keys doesn't in sub process global would be joined.
*
* @param parentGlobalParams parentGlobalParams
* @param subGlobalParams subGlobalParams
* @return global params join
*/
private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) {
List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class);
List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class);
Map<String, String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
for (Property parent : parentPropertyList) {
if (!subMap.containsKey(parent.getProp())) {
subPropertyList.add(parent);
}
}
return JSONUtils.toJsonString(subPropertyList);
}
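    // Worked example (hypothetical values):
    //   parent: [{"prop":"dt","value":"2020-01-01"}, {"prop":"env","value":"prod"}]
    //   sub:    [{"prop":"dt","value":"2020-02-02"}]
    //   result: [{"prop":"dt","value":"2020-02-02"}, {"prop":"env","value":"prod"}]
    // the sub process keeps its own "dt"; only the parent-only key "env" is joined.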
/**
* initialize task instance
*
* @param taskInstance taskInstance
*/
private void initTaskInstance(TaskInstance taskInstance) {
if (!taskInstance.isSubProcess()
&& (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure())) {
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
return;
}
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateTaskInstance(taskInstance);
}
/**
* submit task to db
* submit sub process to command
*
* @param taskInstance taskInstance
* @return task instance
*/
@Transactional(rollbackFor = Exception.class)
public TaskInstance submitTask(TaskInstance taskInstance) {
ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
logger.info("start submit task : {}, instance id:{}, state: {}",
taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState());
//submit to db
TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance);
if (task == null) {
logger.error("end submit task to db error, task name:{}, process id:{} state: {} ",
taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState());
return task;
}
if (!task.getState().typeIsFinished()) {
createSubWorkProcess(processInstance, task);
}
logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ",
taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState());
return task;
}
/**
* set work process instance map
     * note that repeat running does not generate a new sub process instance
* set map {parent instance id, task instance id, 0(child instance id)}
*
* @param parentInstance parentInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) {
ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId());
if (processMap != null) {
return processMap;
}
if (parentInstance.getCommandType() == CommandType.REPEAT_RUNNING) {
// update current task id to map
processMap = findPreviousTaskProcessMap(parentInstance, parentTask);
if (processMap != null) {
processMap.setParentTaskInstanceId(parentTask.getId());
updateWorkProcessInstanceMap(processMap);
return processMap;
}
}
// new task
processMap = new ProcessInstanceMap();
processMap.setParentProcessInstanceId(parentInstance.getId());
processMap.setParentTaskInstanceId(parentTask.getId());
createWorkProcessInstanceMap(processMap);
return processMap;
}
/**
* find previous task work process map.
*
* @param parentProcessInstance parentProcessInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance,
TaskInstance parentTask) {
Integer preTaskId = 0;
List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId());
for (TaskInstance task : preTaskList) {
if (task.getName().equals(parentTask.getName())) {
preTaskId = task.getId();
ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId);
if (map != null) {
return map;
}
}
}
logger.info("sub process instance is not found,parent task:{},parent instance:{}",
parentTask.getId(), parentProcessInstance.getId());
return null;
}
/**
* create sub work process command
*
* @param parentProcessInstance parentProcessInstance
* @param task task
*/
public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) {
if (!task.isSubProcess()) {
return;
}
//check create sub work flow firstly
ProcessInstanceMap instanceMap = findWorkProcessMapByParent(parentProcessInstance.getId(), task.getId());
if (null != instanceMap && CommandType.RECOVER_TOLERANCE_FAULT_PROCESS == parentProcessInstance.getCommandType()) {
            // fault-tolerance recovery does not create a new command when the sub command has already been created
return;
}
instanceMap = setProcessInstanceMap(parentProcessInstance, task);
ProcessInstance childInstance = null;
if (instanceMap.getProcessInstanceId() != 0) {
childInstance = findProcessInstanceById(instanceMap.getProcessInstanceId());
}
Command subProcessCommand = createSubProcessCommand(parentProcessInstance, childInstance, instanceMap, task);
updateSubProcessDefinitionByParent(parentProcessInstance, subProcessCommand.getProcessDefinitionId());
initSubInstanceState(childInstance);
createCommand(subProcessCommand);
logger.info("sub process command created: {} ", subProcessCommand);
}
/**
* complement data needs transform parent parameter to child.
*/
private String getSubWorkFlowParam(ProcessInstanceMap instanceMap, ProcessInstance parentProcessInstance) {
// set sub work process command
String processMapStr = JSONUtils.toJsonString(instanceMap);
Map<String, String> cmdParam = JSONUtils.toMap(processMapStr);
if (parentProcessInstance.isComplementData()) {
Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam());
String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE);
String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime);
processMapStr = JSONUtils.toJsonString(cmdParam);
}
return processMapStr;
}
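    // Sketch of the forwarded map for a complement-data parent (dates are
    // hypothetical; the complement keys are shown by their constant names):
    //   {parentProcessInstanceId, parentTaskInstanceId, processInstanceId,
    //    CMDPARAM_COMPLEMENT_DATA_START_DATE: "2020-01-01 00:00:00",
    //    CMDPARAM_COMPLEMENT_DATA_END_DATE:   "2020-01-07 00:00:00"}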
/**
* create sub work process command
*/
public Command createSubProcessCommand(ProcessInstance parentProcessInstance,
ProcessInstance childInstance,
ProcessInstanceMap instanceMap,
TaskInstance task) {
CommandType commandType = getSubCommandType(parentProcessInstance, childInstance);
TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class);
Map<String, String> subProcessParam = JSONUtils.toMap(taskNode.getParams());
Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID));
String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance);
return new Command(
commandType,
TaskDependType.TASK_POST,
parentProcessInstance.getFailureStrategy(),
parentProcessInstance.getExecutorId(),
childDefineId,
processParam,
parentProcessInstance.getWarningType(),
parentProcessInstance.getWarningGroupId(),
parentProcessInstance.getScheduleTime(),
task.getWorkerGroup(),
parentProcessInstance.getProcessInstancePriority()
);
}
/**
* initialize sub work flow state
     * the child instance state is initialized when recovering from pause/stop/failure
*/
private void initSubInstanceState(ProcessInstance childInstance) {
if (childInstance != null) {
childInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
updateProcessInstance(childInstance);
}
}
/**
* get sub work flow command type
* child instance exist: child command = fatherCommand
* child instance not exists: child command = fatherCommand[0]
*/
private CommandType getSubCommandType(ProcessInstance parentProcessInstance, ProcessInstance childInstance) {
CommandType commandType = parentProcessInstance.getCommandType();
if (childInstance == null) {
String fatherHistoryCommand = parentProcessInstance.getHistoryCmd();
commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]);
}
return commandType;
}
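    // Worked example (hypothetical history): with historyCmd
    // "START_PROCESS,REPEAT_RUNNING" and no child instance yet, the sub command
    // falls back to the first recorded command, START_PROCESS; once the child
    // exists it simply follows the parent's current command type.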
/**
* update sub process definition
*
* @param parentProcessInstance parentProcessInstance
* @param childDefinitionId childDefinitionId
*/
private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) {
ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId());
ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId);
if (childDefinition != null && fatherDefinition != null) {
childDefinition.setWarningGroupId(fatherDefinition.getWarningGroupId());
processDefineMapper.updateById(childDefinition);
}
}
/**
     * submit task instance to the database
*
* @param taskInstance taskInstance
* @param processInstance processInstance
* @return task instance
*/
public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) {
ExecutionStatus processInstanceState = processInstance.getState();
if (taskInstance.getState().typeIsFailure()) {
if (taskInstance.isSubProcess()) {
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1);
} else {
if (processInstanceState != ExecutionStatus.READY_STOP
&& processInstanceState != ExecutionStatus.READY_PAUSE) {
// failure task set invalid
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
                    // create a new task instance
if (taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) {
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1);
}
taskInstance.setSubmitTime(null);
taskInstance.setStartTime(null);
taskInstance.setEndTime(null);
taskInstance.setFlag(Flag.YES);
taskInstance.setHost(null);
taskInstance.setId(0);
}
}
}
taskInstance.setExecutorId(processInstance.getExecutorId());
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority());
taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState));
if (taskInstance.getSubmitTime() == null) {
taskInstance.setSubmitTime(new Date());
}
if (taskInstance.getFirstSubmitTime() == null) {
taskInstance.setFirstSubmitTime(taskInstance.getSubmitTime());
}
boolean saveResult = saveTaskInstance(taskInstance);
if (!saveResult) {
return null;
}
return taskInstance;
}
/**
* get submit task instance state by the work process state
     * the task state cannot be modified when the task is running, delayed or killed,
     * because the task instance may already exist in the task queue.
     * return pause if the work process state is ready pause;
     * return kill if the work process state is ready stop;
     * if none of the above are satisfied, return submitted success
*
* @param taskInstance taskInstance
* @param processInstanceState processInstanceState
* @return process instance state
*/
public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState) {
ExecutionStatus state = taskInstance.getState();
if (
// running, delayed or killed
// the task already exists in task queue
// return state
state == ExecutionStatus.RUNNING_EXECUTION
|| state == ExecutionStatus.DELAY_EXECUTION
|| state == ExecutionStatus.KILL
) {
return state;
}
        // return pause/kill if the process instance state is ready pause / ready stop,
        // otherwise return submitted success
if (processInstanceState == ExecutionStatus.READY_PAUSE) {
state = ExecutionStatus.PAUSE;
} else if (processInstanceState == ExecutionStatus.READY_STOP
|| !checkProcessStrategy(taskInstance)) {
state = ExecutionStatus.KILL;
} else {
state = ExecutionStatus.SUBMITTED_SUCCESS;
}
return state;
}
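    // Decision table for the method above (derived from the code, not a spec):
    //   task state RUNNING_EXECUTION / DELAY_EXECUTION / KILL -> unchanged
    //   process READY_PAUSE                                   -> PAUSE
    //   process READY_STOP, or failure strategy violated      -> KILL
    //   otherwise                                             -> SUBMITTED_SUCCESS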
/**
* check process instance strategy
*
* @param taskInstance taskInstance
* @return check strategy result
*/
private boolean checkProcessStrategy(TaskInstance taskInstance) {
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
FailureStrategy failureStrategy = processInstance.getFailureStrategy();
if (failureStrategy == FailureStrategy.CONTINUE) {
return true;
}
List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId());
for (TaskInstance task : taskInstances) {
if (task.getState() == ExecutionStatus.FAILURE) {
return false;
}
}
return true;
}
/**
* create a new process instance
*
* @param processInstance processInstance
*/
public void createProcessInstance(ProcessInstance processInstance) {
if (processInstance != null) {
processInstanceMapper.insert(processInstance);
}
}
/**
* insert or update work process instance to data base
*
* @param processInstance processInstance
*/
public void saveProcessInstance(ProcessInstance processInstance) {
if (processInstance == null) {
logger.error("save error, process instance is null!");
return;
}
if (processInstance.getId() != 0) {
processInstanceMapper.updateById(processInstance);
} else {
createProcessInstance(processInstance);
}
}
/**
* insert or update command
*
* @param command command
* @return save command result
*/
public int saveCommand(Command command) {
if (command.getId() != 0) {
return commandMapper.updateById(command);
} else {
return commandMapper.insert(command);
}
}
/**
* insert or update task instance
*
* @param taskInstance taskInstance
* @return save task instance result
*/
public boolean saveTaskInstance(TaskInstance taskInstance) {
if (taskInstance.getId() != 0) {
return updateTaskInstance(taskInstance);
} else {
return createTaskInstance(taskInstance);
}
}
/**
* insert task instance
*
* @param taskInstance taskInstance
* @return create task instance result
*/
public boolean createTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.insert(taskInstance);
return count > 0;
}
/**
* update task instance
*
* @param taskInstance taskInstance
* @return update task instance result
*/
public boolean updateTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.updateById(taskInstance);
return count > 0;
}
/**
* delete a command by id
*
* @param id id
*/
public void delCommandById(int id) {
commandMapper.deleteById(id);
}
/**
* find task instance by id
*
* @param taskId task id
     * @return task instance
*/
public TaskInstance findTaskInstanceById(Integer taskId) {
return taskInstanceMapper.selectById(taskId);
}
/**
     * package task instance, associating processInstance and processDefine
*
* @param taskInstId taskInstId
* @return task instance
*/
public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId) {
// get task instance
TaskInstance taskInstance = findTaskInstanceById(taskInstId);
if (taskInstance == null) {
return taskInstance;
}
// get process instance
ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
// get process define
ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId());
taskInstance.setProcessInstance(processInstance);
taskInstance.setProcessDefine(processDefine);
return taskInstance;
}
/**
* get id list by task state
*
* @param instanceId instanceId
* @param state state
* @return task instance states
*/
public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state) {
return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal());
}
/**
* find valid task list by process definition id
*
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId) {
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES);
}
/**
* find previous task list by work process id
*
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId) {
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO);
}
/**
* update work process instance map
*
* @param processInstanceMap processInstanceMap
* @return update process instance result
*/
public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) {
return processInstanceMapMapper.updateById(processInstanceMap);
}
/**
* create work process instance map
*
* @param processInstanceMap processInstanceMap
* @return create process instance result
*/
public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) {
        if (processInstanceMap == null) {
            return 0;
        }
        return processInstanceMapMapper.insert(processInstanceMap);
}
/**
* find work process map by parent process id and parent task id.
*
* @param parentWorkProcessId parentWorkProcessId
* @param parentTaskId parentTaskId
* @return process instance map
*/
public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) {
return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId);
}
/**
* delete work process map by parent process id
*
* @param parentWorkProcessId parentWorkProcessId
* @return delete process map result
*/
public int deleteWorkProcessMapByParentId(int parentWorkProcessId) {
return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId);
}
/**
* find sub process instance
*
* @param parentProcessId parentProcessId
* @param parentTaskId parentTaskId
* @return process instance
*/
public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) {
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId);
if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) {
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getProcessInstanceId());
return processInstance;
}
/**
* find parent process instance
*
* @param subProcessId subProcessId
* @return process instance
*/
public ProcessInstance findParentProcessInstance(Integer subProcessId) {
ProcessInstance processInstance = null;
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId);
if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) {
return processInstance;
}
processInstance = findProcessInstanceById(processInstanceMap.getParentProcessInstanceId());
return processInstance;
}
/**
* change task state
*
* @param taskInstance taskInstance
* @param state state
* @param startTime startTime
* @param host host
* @param executePath executePath
* @param logPath logPath
* @param taskInstId taskInstId
*/
public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host,
String executePath,
String logPath,
int taskInstId) {
taskInstance.setState(state);
taskInstance.setStartTime(startTime);
taskInstance.setHost(host);
taskInstance.setExecutePath(executePath);
taskInstance.setLogPath(logPath);
saveTaskInstance(taskInstance);
}
/**
* update process instance
*
* @param processInstance processInstance
* @return update process instance result
*/
public int updateProcessInstance(ProcessInstance processInstance) {
return processInstanceMapper.updateById(processInstance);
}
/**
* update the process instance
*
* @param processInstanceId processInstanceId
* @param processJson processJson
* @param globalParams globalParams
* @param scheduleTime scheduleTime
* @param flag flag
* @param locations locations
* @param connects connects
* @return update process instance result
*/
public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag,
String locations, String connects) {
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance != null) {
processInstance.setProcessInstanceJson(processJson);
processInstance.setGlobalParams(globalParams);
processInstance.setScheduleTime(scheduleTime);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
return processInstanceMapper.updateById(processInstance);
}
return 0;
}
/**
* change task state
*
* @param taskInstance taskInstance
* @param state state
* @param endTime endTime
* @param processId process id
* @param appIds app ids
* @param taskInstId taskInstId
* @param varPool varPool
*/
public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state,
Date endTime,
int processId,
String appIds,
int taskInstId,
String varPool) {
taskInstance.setPid(processId);
taskInstance.setAppLink(appIds);
taskInstance.setState(state);
taskInstance.setEndTime(endTime);
taskInstance.setVarPool(varPool);
saveTaskInstance(taskInstance);
}
/**
* convert integer list to string list
*
* @param intList intList
* @return string list
*/
public List<String> convertIntListToString(List<Integer> intList) {
if (intList == null) {
return new ArrayList<>();
}
List<String> result = new ArrayList<>(intList.size());
for (Integer intVar : intList) {
result.add(String.valueOf(intVar));
}
return result;
}
/**
* query schedule by id
*
* @param id id
* @return schedule
*/
public Schedule querySchedule(int id) {
return scheduleMapper.selectById(id);
}
/**
* query Schedule by processDefinitionId
*
* @param processDefinitionId processDefinitionId
* @return schedule list
* @see Schedule
*/
public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) {
return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
}
/**
* query need failover process instance
*
* @param host host
* @return process instance list
*/
public List<ProcessInstance> queryNeedFailoverProcessInstances(String host) {
return processInstanceMapper.queryByHostAndStatus(host, stateArray);
}
/**
* process need failover process instance
*
* @param processInstance processInstance
*/
@Transactional(rollbackFor = RuntimeException.class)
public void processNeedFailoverProcessInstances(ProcessInstance processInstance) {
//1 update processInstance host is null
processInstance.setHost(Constants.NULL);
processInstanceMapper.updateById(processInstance);
//2 insert into recover command
Command cmd = new Command();
cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId());
cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId()));
cmd.setExecutorId(processInstance.getExecutorId());
cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS);
createCommand(cmd);
}
/**
* query all need failover task instances by host
*
* @param host host
* @return task instance list
*/
public List<TaskInstance> queryNeedFailoverTaskInstances(String host) {
return taskInstanceMapper.queryByHostAndStatus(host, stateArray);
}
/**
* find data source by id
*
* @param id id
* @return datasource
*/
public DataSource findDataSourceById(int id) {
return dataSourceMapper.selectById(id);
}
/**
* update process instance state by id
*
* @param processInstanceId processInstanceId
* @param executionStatus executionStatus
* @return update process result
*/
public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
ProcessInstance instance = processInstanceMapper.selectById(processInstanceId);
instance.setState(executionStatus);
return processInstanceMapper.updateById(instance);
}
/**
* find process instance by the task id
*
* @param taskId taskId
* @return process instance
*/
public ProcessInstance findProcessInstanceByTaskId(int taskId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskId);
if (taskInstance != null) {
return processInstanceMapper.selectById(taskInstance.getProcessInstanceId());
}
return null;
}
/**
* find udf function list by id list string
*
* @param ids ids
* @return udf function list
*/
public List<UdfFunc> queryUdfFunListByIds(int[] ids) {
return udfFuncMapper.queryUdfByIdStr(ids, null);
}
/**
* find tenant code by resource name
*
* @param resName resource name
* @param resourceType resource type
* @return tenant code
*/
public String queryTenantCodeByResName(String resName, ResourceType resourceType) {
// normalize the resource name to a full name so that the tenant code query also succeeds for data created by older versions
String fullName = resName.startsWith("/") ? resName : String.format("/%s", resName);
return resourceMapper.queryTenantCodeByResourceName(fullName, resourceType.ordinal());
}
/**
* find schedule list by process define id.
*
* @param ids ids
* @return schedule list
*/
public List<Schedule> selectAllByProcessDefineId(int[] ids) {
return scheduleMapper.selectAllByProcessDefineArray(ids);
}
/**
* get dependency cycle by work process define id and scheduler fire time
*
* @param masterId masterId
* @param processDefinitionId processDefinitionId
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency
* @throws Exception if error throws Exception
*/
public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception {
List<CycleDependency> list = getCycleDependencies(masterId, new int[]{processDefinitionId}, scheduledFireTime);
return !list.isEmpty() ? list.get(0) : null;
}
/**
* get dependency cycle list by work process define id list and scheduler fire time
*
* @param masterId masterId
* @param ids ids
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency list
* @throws Exception if error throws Exception
*/
public List<CycleDependency> getCycleDependencies(int masterId, int[] ids, Date scheduledFireTime) throws Exception {
List<CycleDependency> cycleDependencyList = new ArrayList<>();
if (null == ids || ids.length == 0) {
logger.warn("ids[] is empty!is invalid!");
return cycleDependencyList;
}
if (scheduledFireTime == null) {
logger.warn("scheduledFireTime is null!is invalid!");
return cycleDependencyList;
}
String strCrontab = "";
CronExpression depCronExpression;
Cron depCron;
List<Date> list;
List<Schedule> schedules = this.selectAllByProcessDefineId(ids);
// for all scheduling information
for (Schedule depSchedule : schedules) {
strCrontab = depSchedule.getCrontab();
depCronExpression = CronUtils.parse2CronExpression(strCrontab);
depCron = CronUtils.parse2Cron(strCrontab);
CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron);
if (cycleEnum == null) {
logger.error("{} is not valid", strCrontab);
continue;
}
Calendar calendar = Calendar.getInstance();
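// look back far enough to cover at least one full dependency cycle plus slack:
// 25 hours for hourly, 32 days for daily/weekly, 13 months for monthly schedules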
switch (cycleEnum) {
case HOUR:
calendar.add(Calendar.HOUR, -25);
break;
case DAY:
case WEEK:
calendar.add(Calendar.DATE, -32);
break;
case MONTH:
calendar.add(Calendar.MONTH, -13);
break;
default:
String cycleName = cycleEnum.name();
logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleName);
continue;
}
Date start = calendar.getTime();
if (depSchedule.getProcessDefinitionId() == masterId) {
list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression);
} else {
list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression);
}
if (!list.isEmpty()) {
start = list.get(list.size() - 1);
CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(), start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum);
cycleDependencyList.add(dependency);
}
}
return cycleDependencyList;
}
/**
* find last scheduler process instance in the date interval
*
* @param definitionId definitionId
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastSchedulerProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last manual process instance interval
*
* @param definitionId process definition id
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastManualProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last running process instance
*
* @param definitionId process definition id
* @param startTime start time
* @param endTime end time
* @return process instance
*/
public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) {
return processInstanceMapper.queryLastRunningProcess(definitionId,
startTime,
endTime,
stateArray);
}
/**
* query user queue by process instance id
*
* @param processInstanceId processInstanceId
* @return queue
*/
public String queryUserQueueByProcessInstanceId(int processInstanceId) {
String queue = "";
ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
if (processInstance == null) {
return queue;
}
User executor = userMapper.selectById(processInstance.getExecutorId());
if (executor != null) {
queue = executor.getQueue();
}
return queue;
}
/**
* query project name and user name by processInstanceId.
*
* @param processInstanceId processInstanceId
* @return projectName and userName
*/
public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) {
return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId);
}
/**
* get task worker group
*
* @param taskInstance taskInstance
* @return workerGroupId
*/
public String getTaskWorkerGroup(TaskInstance taskInstance) {
String workerGroup = taskInstance.getWorkerGroup();
if (StringUtils.isNotBlank(workerGroup)) {
return workerGroup;
}
int processInstanceId = taskInstance.getProcessInstanceId();
ProcessInstance processInstance = findProcessInstanceById(processInstanceId);
if (processInstance != null) {
return processInstance.getWorkerGroup();
}
logger.info("task : {} will use default worker group", taskInstance.getId());
return Constants.DEFAULT_WORKER_GROUP;
}
/**
* get have perm project list
*
* @param userId userId
* @return project list
*/
public List<Project> getProjectListHavePerm(int userId) {
List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId);
List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId);
if (createProjects == null) {
createProjects = new ArrayList<>();
}
if (authedProjects != null) {
createProjects.addAll(authedProjects);
}
return createProjects;
}
/**
* get have perm project ids
*
* @param userId userId
* @return project ids
*/
public List<Integer> getProjectIdListHavePerm(int userId) {
List<Integer> projectIdList = new ArrayList<>();
for (Project project : getProjectListHavePerm(userId)) {
projectIdList.add(project.getId());
}
return projectIdList;
}
/**
* list unauthorized entities by authorization type
*
* @param userId user id
* @param needChecks the ids or names to check, depending on the authorization type
* @return unauthorized entity list
*/
public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) {
List<T> resultList = new ArrayList<>();
if (Objects.nonNull(needChecks) && needChecks.length > 0) {
Set<T> originResSet = new HashSet<>(Arrays.asList(needChecks));
switch (authorizationType) {
case RESOURCE_FILE_ID:
case UDF_FILE:
Set<Integer> authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(Resource::getId).collect(toSet());
originResSet.removeAll(authorizedResourceFiles);
break;
case RESOURCE_FILE_NAME:
Set<String> authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(Resource::getFullName).collect(toSet());
originResSet.removeAll(authorizedResources);
break;
case DATASOURCE:
Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId, needChecks).stream().map(DataSource::getId).collect(toSet());
originResSet.removeAll(authorizedDatasources);
break;
case UDF:
Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(UdfFunc::getId).collect(toSet());
originResSet.removeAll(authorizedUdfs);
break;
default:
break;
}
resultList.addAll(originResSet);
}
return resultList;
}
/**
* get user by user id
*
* @param userId user id
* @return User
*/
public User getUserById(int userId) {
return userMapper.selectById(userId);
}
/**
* get resource by resource id
*
* @param resourceId resource id
* @return Resource
*/
public Resource getResourceById(int resourceId) {
return resourceMapper.selectById(resourceId);
}
/**
* list resources by ids
*
* @param resIds resIds
* @return resource list
*/
public List<Resource> listResourceByIds(Integer[] resIds) {
return resourceMapper.listResourceByIds(resIds);
}
/**
* format task app id in task instance
*
* @param taskInstance taskInstance
* @return app id in the form processDefinitionId_processInstanceId_taskInstanceId
*/
public String formatTaskAppId(TaskInstance taskInstance) {
ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId());
ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if (definition == null || processInstanceById == null) {
return "";
}
return String.format("%s_%s_%s",
definition.getId(),
processInstanceById.getId(),
taskInstance.getId());
}
}
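/*
 * Illustrative usage sketch (not part of the original source): how a caller might
 * persist a task state transition through this service. The processService bean and
 * the taskId, executePath and logPath values are assumed to be supplied by the caller.
 *
 *   TaskInstance taskInstance = processService.findTaskInstanceById(taskId);
 *   processService.changeTaskState(taskInstance, ExecutionStatus.RUNNING_EXECUTION,
 *           new Date(), NetUtils.getHost(), executePath, logPath, taskId);
 */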
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 2,101 | [BUG] After renaming the branch name, the name of Branch Flow does not change | After the task name is changed, the branch flow still shows the task name from before the change
![image](https://user-images.githubusercontent.com/55787491/76071071-9e2ff200-5fd0-11ea-8702-e7b357d7b060.png)
**Which version of Dolphin Scheduler:**
-[1.2.0_release]
| https://github.com/apache/dolphinscheduler/issues/2101 | https://github.com/apache/dolphinscheduler/pull/4555 | cfd9db5555b3d8f9f33f25a9272c8741115a9336 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | "2020-03-06T09:35:19Z" | java | "2021-02-02T08:07:37Z" | dolphinscheduler-service/src/test/java/org/apache/dolphinscheduler/service/process/ProcessServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.process;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtilsTest;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
/**
* process service test
*/
@RunWith(MockitoJUnitRunner.class)
public class ProcessServiceTest {
private static final Logger logger = LoggerFactory.getLogger(CronUtilsTest.class);
@InjectMocks
private ProcessService processService;
@Mock
private CommandMapper commandMapper;
@Mock
private ErrorCommandMapper errorCommandMapper;
@Mock
private ProcessDefinitionMapper processDefineMapper;
@Mock
private ProcessInstanceMapper processInstanceMapper;
@Mock
private UserMapper userMapper;
@Mock
TaskInstanceMapper taskInstanceMapper;
@Test
public void testCreateSubCommand() {
ProcessService processService = new ProcessService();
ProcessInstance parentInstance = new ProcessInstance();
parentInstance.setProcessDefinitionId(1);
parentInstance.setWarningType(WarningType.SUCCESS);
parentInstance.setWarningGroupId(0);
TaskInstance task = new TaskInstance();
task.setTaskJson("{\"params\":{\"processDefinitionId\":100}}");
task.setId(10);
ProcessInstance childInstance = null;
ProcessInstanceMap instanceMap = new ProcessInstanceMap();
instanceMap.setParentProcessInstanceId(1);
instanceMap.setParentTaskInstanceId(10);
Command command = null;
//father history: start; child null == command type: start
parentInstance.setHistoryCmd("START_PROCESS");
parentInstance.setCommandType(CommandType.START_PROCESS);
command = processService.createSubProcessCommand(
parentInstance, childInstance, instanceMap, task
);
Assert.assertEquals(CommandType.START_PROCESS, command.getCommandType());
//father history: start,start failure; child null == command type: start
parentInstance.setCommandType(CommandType.START_FAILURE_TASK_PROCESS);
parentInstance.setHistoryCmd("START_PROCESS,START_FAILURE_TASK_PROCESS");
command = processService.createSubProcessCommand(
parentInstance, childInstance, instanceMap, task
);
Assert.assertEquals(CommandType.START_PROCESS, command.getCommandType());
//father history: scheduler,start failure; child null == command type: scheduler
parentInstance.setCommandType(CommandType.START_FAILURE_TASK_PROCESS);
parentInstance.setHistoryCmd("SCHEDULER,START_FAILURE_TASK_PROCESS");
command = processService.createSubProcessCommand(
parentInstance, childInstance, instanceMap, task
);
Assert.assertEquals(CommandType.SCHEDULER, command.getCommandType());
//father history: complement,start failure; child null == command type: complement
String startString = "2020-01-01 00:00:00";
String endString = "2020-01-10 00:00:00";
parentInstance.setCommandType(CommandType.START_FAILURE_TASK_PROCESS);
parentInstance.setHistoryCmd("COMPLEMENT_DATA,START_FAILURE_TASK_PROCESS");
Map<String, String> complementMap = new HashMap<>();
complementMap.put(Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE, startString);
complementMap.put(Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE, endString);
parentInstance.setCommandParam(JSONUtils.toJsonString(complementMap));
command = processService.createSubProcessCommand(
parentInstance, childInstance, instanceMap, task
);
Assert.assertEquals(CommandType.COMPLEMENT_DATA, command.getCommandType());
JsonNode complementDate = JSONUtils.parseObject(command.getCommandParam());
Date start = DateUtils.stringToDate(complementDate.get(Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE).asText());
Date end = DateUtils.stringToDate(complementDate.get(Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE).asText());
Assert.assertEquals(startString, DateUtils.dateToString(start));
Assert.assertEquals(endString, DateUtils.dateToString(end));
//father history: start,failure,start failure; child not null == command type: start failure
childInstance = new ProcessInstance();
parentInstance.setCommandType(CommandType.START_FAILURE_TASK_PROCESS);
parentInstance.setHistoryCmd("START_PROCESS,START_FAILURE_TASK_PROCESS");
command = processService.createSubProcessCommand(
parentInstance, childInstance, instanceMap, task
);
Assert.assertEquals(CommandType.START_FAILURE_TASK_PROCESS, command.getCommandType());
}
@Test
public void testVerifyIsNeedCreateCommand() {
List<Command> commands = new ArrayList<>();
Command command = new Command();
command.setCommandType(CommandType.REPEAT_RUNNING);
command.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"111\"}");
commands.add(command);
Mockito.when(commandMapper.selectList(null)).thenReturn(commands);
Assert.assertFalse(processService.verifyIsNeedCreateCommand(command));
Command command1 = new Command();
command1.setCommandType(CommandType.REPEAT_RUNNING);
command1.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"222\"}");
Assert.assertTrue(processService.verifyIsNeedCreateCommand(command1));
Command command2 = new Command();
command2.setCommandType(CommandType.PAUSE);
Assert.assertTrue(processService.verifyIsNeedCreateCommand(command2));
}
@Test
public void testCreateRecoveryWaitingThreadCommand() {
int id = 123;
Mockito.when(commandMapper.deleteById(id)).thenReturn(1);
ProcessInstance subProcessInstance = new ProcessInstance();
subProcessInstance.setIsSubProcess(Flag.YES);
Command originCommand = new Command();
originCommand.setId(id);
processService.createRecoveryWaitingThreadCommand(originCommand, subProcessInstance);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(111);
processService.createRecoveryWaitingThreadCommand(null, subProcessInstance);
Command recoverCommand = new Command();
recoverCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
processService.createRecoveryWaitingThreadCommand(recoverCommand, subProcessInstance);
Command repeatRunningCommand = new Command();
recoverCommand.setCommandType(CommandType.REPEAT_RUNNING);
processService.createRecoveryWaitingThreadCommand(repeatRunningCommand, subProcessInstance);
ProcessInstance subProcessInstance2 = new ProcessInstance();
subProcessInstance2.setId(111);
subProcessInstance2.setIsSubProcess(Flag.NO);
processService.createRecoveryWaitingThreadCommand(repeatRunningCommand, subProcessInstance2);
}
@Test
public void testHandleCommand() {
//cannot construct process instance, return null;
String host = "127.0.0.1";
int validThreadNum = 1;
Command command = new Command();
command.setProcessDefinitionId(222);
command.setCommandType(CommandType.REPEAT_RUNNING);
command.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"111\",\""
+ CMD_PARAM_SUB_PROCESS_DEFINE_ID + "\":\"222\"}");
Mockito.when(processDefineMapper.selectById(command.getProcessDefinitionId())).thenReturn(null);
Assert.assertNull(processService.handleCommand(logger, host, validThreadNum, command));
// there are not enough threads for this command
Command command1 = new Command();
command1.setProcessDefinitionId(123);
command1.setCommandParam("{\"ProcessInstanceId\":222}");
command1.setCommandType(CommandType.START_PROCESS);
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(123);
processDefinition.setName("test");
processDefinition.setVersion(1);
processDefinition.setProcessDefinitionJson("{\"globalParams\":[{\"prop\":\"startParam1\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}],\"tasks\":[{\"conditionResult\":"
+ "{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\",\"dependence\":{}"
+ ",\"description\":\"\",\"id\":\"tasks-3011\",\"maxRetryTimes\":\"0\",\"name\":\"tsssss\""
+ ",\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]}"
+ ",\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":\"MEDIUM\""
+ ",\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":\"SHELL\""
+ ",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}");
processDefinition.setGlobalParams("[{\"prop\":\"startParam1\",\"direct\":\"IN\",\"type\":\"VARCHAR\",\"value\":\"\"}]");
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(222);
Mockito.when(processDefineMapper.selectById(command1.getProcessDefinitionId())).thenReturn(processDefinition);
Mockito.when(processInstanceMapper.queryDetailById(222)).thenReturn(processInstance);
Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command1));
Command command2 = new Command();
command2.setCommandParam("{\"ProcessInstanceId\":222,\"StartNodeIdList\":\"n1,n2\"}");
command2.setProcessDefinitionId(123);
command2.setCommandType(CommandType.RECOVER_SUSPENDED_PROCESS);
Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command2));
Command command3 = new Command();
command3.setProcessDefinitionId(123);
command3.setCommandParam("{\"WaitingThreadInstanceId\":222}");
command3.setCommandType(CommandType.START_FAILURE_TASK_PROCESS);
Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command3));
Command command4 = new Command();
command4.setProcessDefinitionId(123);
command4.setCommandParam("{\"WaitingThreadInstanceId\":222,\"StartNodeIdList\":\"n1,n2\"}");
command4.setCommandType(CommandType.REPEAT_RUNNING);
Assert.assertNotNull(processService.handleCommand(logger, host, validThreadNum, command4));
Command command5 = new Command();
command5.setProcessDefinitionId(123);
HashMap<String, String> startParams = new HashMap<>();
startParams.put("startParam1", "testStartParam1");
HashMap<String, String> commandParams = new HashMap<>();
commandParams.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams));
command5.setCommandParam(JSONUtils.toJsonString(commandParams));
command5.setCommandType(CommandType.START_PROCESS);
ProcessInstance processInstance1 = processService.handleCommand(logger, host, validThreadNum, command5);
Assert.assertTrue(processInstance1.getGlobalParams().contains("\"testStartParam1\""));
}
@Test
public void testGetUserById() {
User user = new User();
user.setId(123);
Mockito.when(userMapper.selectById(123)).thenReturn(user);
Assert.assertEquals(user, processService.getUserById(123));
}
@Test
public void testFormatTaskAppId() {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(333);
taskInstance.setProcessDefinitionId(111);
taskInstance.setProcessInstanceId(222);
Mockito.when(processService.findProcessDefineById(taskInstance.getProcessDefinitionId())).thenReturn(null);
Mockito.when(processService.findProcessInstanceById(taskInstance.getProcessInstanceId())).thenReturn(null);
Assert.assertEquals("", processService.formatTaskAppId(taskInstance));
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setId(111);
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(222);
Mockito.when(processService.findProcessDefineById(taskInstance.getProcessDefinitionId())).thenReturn(processDefinition);
Mockito.when(processService.findProcessInstanceById(taskInstance.getProcessInstanceId())).thenReturn(processInstance);
Assert.assertEquals("111_222_333", processService.formatTaskAppId(taskInstance));
}
@Test
public void testRecurseFindSubProcessId() {
ProcessDefinition processDefinition = new ProcessDefinition();
processDefinition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\":"
+ "{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\""
+ ",\"dependence\":{},\"description\":\"\",\"id\":\"tasks-76544\""
+ ",\"maxRetryTimes\":\"0\",\"name\":\"test\",\"params\":{\"localParams\":[],"
+ "\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[],\"processDefinitionId\""
+ ":\"222\"},\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\","
+ "\"taskInstancePriority\":\"MEDIUM\",\"timeout\":{\"enable\":false,\"interval\":"
+ "null,\"strategy\":\"\"},\"type\":\"SHELL\",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],"
+ "\"tenantId\":4,\"timeout\":0}");
int parentId = 111;
List<Integer> ids = new ArrayList<>();
ProcessDefinition processDefinition2 = new ProcessDefinition();
processDefinition2.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"conditionResult\""
+ ":{\"failedNode\":[\"\"],\"successNode\":[\"\"]},\"delayTime\":\"0\",\"dependence\":{},"
+ "\"description\":\"\",\"id\":\"tasks-76544\",\"maxRetryTimes\":\"0\",\"name\":\"test\","
+ "\"params\":{\"localParams\":[],\"rawScript\":\"echo \\\"123123\\\"\",\"resourceList\":[]},"
+ "\"preTasks\":[],\"retryInterval\":\"1\",\"runFlag\":\"NORMAL\",\"taskInstancePriority\":"
+ "\"MEDIUM\",\"timeout\":{\"enable\":false,\"interval\":null,\"strategy\":\"\"},\"type\":"
+ "\"SHELL\",\"waitStartTimeout\":{},\"workerGroup\":\"default\"}],\"tenantId\":4,\"timeout\":0}");
Mockito.when(processDefineMapper.selectById(parentId)).thenReturn(processDefinition);
Mockito.when(processDefineMapper.selectById(222)).thenReturn(processDefinition2);
processService.recurseFindSubProcessId(parentId, ids);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,641 | [Bug][Alert] Sending the email to the following server failed :null:null |
**Describe the bug**
The alert server fails to send email alerts; the reported message is 'Sending the email to the following server failed :null:null'
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4641 | https://github.com/apache/dolphinscheduler/pull/4642 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | 6df87b89d7b0ec013c5a2bfdfb92fa3ff1496c40 | "2021-02-01T04:50:30Z" | java | "2021-02-03T06:20:16Z" | dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/main/java/org/apache/dolphinscheduler/plugin/alert/email/MailParamsConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.email;
/**
* mail plugin params json use
*/
public class MailParamsConstants {
private MailParamsConstants() {
throw new IllegalStateException("Utility class");
}
public static final String PLUGIN_DEFAULT_EMAIL_RECEIVERS = "$t('receivers')";
public static final String NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS = "receivers";
public static final String PLUGIN_DEFAULT_EMAIL_RECEIVERCCS = "$t('receiverCcs')";
public static final String NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERCCS = "receiverCcs";
public static final String MAIL_PROTOCOL = "transport.protocol";
public static final String NAME_MAIL_PROTOCOL = "protocol";
public static final String MAIL_SMTP_HOST = "smtp.host";
public static final String NAME_MAIL_SMTP_HOST = "serverHost";
public static final String MAIL_SMTP_PORT = "smtp.port";
public static final String NAME_MAIL_SMTP_PORT = "serverPort";
public static final String MAIL_SENDER = "sender";
public static final String NAME_MAIL_SENDER = "sender";
public static final String MAIL_SMTP_AUTH = "smtp.auth";
public static final String NAME_MAIL_SMTP_AUTH = "enableSmtpAuth";
public static final String MAIL_USER = "user";
public static final String NAME_MAIL_USER = "user";
public static final String MAIL_PASSWD = "passwd";
public static final String NAME_MAIL_PASSWD = "passwd";
public static final String MAIL_SMTP_STARTTLS_ENABLE = "smtp.starttls.enable";
public static final String NAME_MAIL_SMTP_STARTTLS_ENABLE = "starttlsEnable";
public static final String MAIL_SMTP_SSL_ENABLE = "smtp.ssl.enable";
public static final String NAME_MAIL_SMTP_SSL_ENABLE = "sslEnable";
public static final String MAIL_SMTP_SSL_TRUST = "smtp.ssl.trust";
public static final String NAME_MAIL_SMTP_SSL_TRUST = "smtpSslTrust";
}
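/*
 * Hedged sketch (not part of the original source) of how these keys are typically
 * composed into JavaMail session properties; the exact wiring inside MailSender is
 * an assumption here. JavaMail only recognizes fully qualified keys such as
 * "mail.smtp.host", so the "mail." prefix must be added when building the Properties:
 *
 *   Properties props = new Properties();
 *   props.setProperty("mail." + MailParamsConstants.MAIL_SMTP_HOST, serverHost);
 *   props.setProperty("mail." + MailParamsConstants.MAIL_SMTP_PORT, serverPort);
 *
 * A missing or unprefixed key leaves host/port unresolved, which surfaces as errors
 * like "Sending the email to the following server failed :null:null".
 */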
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,641 | [Bug][Alert] Sending the email to the following server failed :null:null |
**Describe the bug**
The alert server fails to send email alerts; the reported message is 'Sending the email to the following server failed :null:null'
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4641 | https://github.com/apache/dolphinscheduler/pull/4642 | 14d49bc584685a33e2952573a0cb6e88377ab9f9 | 6df87b89d7b0ec013c5a2bfdfb92fa3ff1496c40 | "2021-02-01T04:50:30Z" | java | "2021-02-03T06:20:16Z" | dolphinscheduler-alert-plugin/dolphinscheduler-alert-email/src/test/java/org/apache/dolphinscheduler/plugin/alert/email/MailUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.plugin.alert.email;
import org.apache.dolphinscheduler.plugin.alert.email.template.AlertTemplate;
import org.apache.dolphinscheduler.plugin.alert.email.template.DefaultHTMLTemplate;
import org.apache.dolphinscheduler.spi.alert.AlertConstants;
import org.apache.dolphinscheduler.spi.alert.ShowType;
import org.apache.dolphinscheduler.spi.utils.JSONUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* mail utils test
*/
public class MailUtilsTest {
private static final Logger logger = LoggerFactory.getLogger(MailUtilsTest.class);
private static Map<String, String> emailConfig = new HashMap<>();
private static AlertTemplate alertTemplate;
static MailSender mailSender;
@BeforeClass
public static void initEmailConfig() {
emailConfig.put(MailParamsConstants.NAME_MAIL_PROTOCOL, "smtp");
emailConfig.put(MailParamsConstants.NAME_MAIL_SMTP_HOST, "xxx.xxx.com");
emailConfig.put(MailParamsConstants.NAME_MAIL_SMTP_PORT, "25");
emailConfig.put(MailParamsConstants.NAME_MAIL_SENDER, "xxx1.xxx.com");
emailConfig.put(MailParamsConstants.NAME_MAIL_USER, "xxx2.xxx.com");
emailConfig.put(MailParamsConstants.NAME_MAIL_PASSWD, "111111");
emailConfig.put(MailParamsConstants.NAME_MAIL_SMTP_STARTTLS_ENABLE, "true");
emailConfig.put(MailParamsConstants.NAME_MAIL_SMTP_SSL_ENABLE, "false");
emailConfig.put(MailParamsConstants.NAME_MAIL_SMTP_SSL_TRUST, "false");
emailConfig.put(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERS, "347801120@qq.com");
emailConfig.put(MailParamsConstants.NAME_PLUGIN_DEFAULT_EMAIL_RECEIVERCCS, "347801120@qq.com");
emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.TEXT.getDescp());
alertTemplate = new DefaultHTMLTemplate();
mailSender = new MailSender(emailConfig);
}
@Test
public void testSendMails() {
String content = "[\"id:69\","
+ "\"name:UserBehavior-0--1193959466\","
+ "\"Job name: Start workflow\","
+ "\"State: SUCCESS\","
+ "\"Recovery:NO\","
+ "\"Run time: 1\","
+ "\"Start time: 2018-08-06 10:31:34.0\","
+ "\"End time: 2018-08-06 10:31:49.0\","
+ "\"Host: 192.168.xx.xx\","
+ "\"Notify group :4\"]";
mailSender.sendMails(
"Mysql Exception",
content);
}
public String list2String() {
LinkedHashMap<String, Object> map1 = new LinkedHashMap<>();
map1.put("mysql service name", "mysql200");
map1.put("mysql address", "192.168.xx.xx");
map1.put("port", "3306");
map1.put("no index of number", "80");
map1.put("database client connections", "190");
LinkedHashMap<String, Object> map2 = new LinkedHashMap<>();
map2.put("mysql service name", "mysql210");
map2.put("mysql address", "192.168.xx.xx");
map2.put("port", "3306");
map2.put("no index of number", "10");
map2.put("database client connections", "90");
List<LinkedHashMap<String, Object>> maps = new ArrayList<>();
maps.add(0, map1);
maps.add(1, map2);
String mapjson = JSONUtils.toJsonString(maps);
logger.info(mapjson);
return mapjson;
}
@Test
public void testSendTableMail() {
String title = "Mysql Exception";
String content = list2String();
emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.TABLE.getDescp());
mailSender = new MailSender(emailConfig);
mailSender.sendMails(title, content);
}
@Test
public void testAttachmentFile() throws Exception {
String content = list2String();
emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.ATTACHMENT.getDescp());
mailSender = new MailSender(emailConfig);
mailSender.sendMails("gaojing", content);
}
@Test
public void testTableAttachmentFile() throws Exception {
String content = list2String();
emailConfig.put(AlertConstants.SHOW_TYPE, ShowType.TABLEATTACHMENT.getDescp());
mailSender = new MailSender(emailConfig);
mailSender.sendMails("gaojing", content);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,298 | [Bug][k8s]when recreate or upgrade helm release, the host ip changed | **Describe the bug**
When DS is deployed to Kubernetes by Helm, upgrading or recreating the release changes the IP of the worker server and the other servers, so when you query the log of a task that ran before the change, you get an empty log.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. deploy the cluster using Helm
2. recreate the deployed release or just upgrade it
3. query a task which ran before the upgrade/recreate
4. no log is shown
**Expected behavior**
The log should be shown.
**Screenshots**
![image](https://user-images.githubusercontent.com/3021207/88361996-915c4100-cdad-11ea-8e45-1c46b69109ce.png)
**Which version of Dolphin Scheduler:**
-[1.3.1-release]
**Additional context**
**Requirement or improvement**
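A minimal sketch of the direction a fix can take (grounded in the updated `NetUtils` below): when running in Kubernetes, register the stable pod hostname instead of the pod IP, since the hostname of a pod managed by a StatefulSet survives an upgrade or recreate while the IP does not.

    // inside NetUtils.getHost(); KUBERNETES_MODE is a flag detected from the environment
    HOST_ADDRESS = Constants.KUBERNETES_MODE ? address.getHostName() : address.getHostAddress();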
| https://github.com/apache/dolphinscheduler/issues/3298 | https://github.com/apache/dolphinscheduler/pull/4786 | 23591b71a2859021e9280899b1f1b6ff3b679fbc | 8ebddc1f5bb484a2d7d8e1353b2a8f51fae78ec0 | "2020-07-24T05:00:07Z" | java | "2021-02-18T03:41:58Z" | dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/NetUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
import static org.apache.dolphinscheduler.common.Constants.DOLPHIN_SCHEDULER_PREFERRED_NETWORK_INTERFACE;
import static java.util.Collections.emptyList;
import org.apache.dolphinscheduler.common.Constants;
import java.io.IOException;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* NetUtils
*/
public class NetUtils {
private static final Pattern IP_PATTERN = Pattern.compile("\\d{1,3}(\\.\\d{1,3}){3,5}$");
private static final String NETWORK_PRIORITY_DEFAULT = "default";
private static final String NETWORK_PRIORITY_INNER = "inner";
private static final String NETWORK_PRIORITY_OUTER = "outer";
private static final Logger logger = LoggerFactory.getLogger(NetUtils.class);
private static InetAddress LOCAL_ADDRESS = null;
private static volatile String HOST_ADDRESS;
private NetUtils() {
throw new UnsupportedOperationException("Construct NetUtils");
}
/**
* get addr like host:port
*
* @param host host
* @param port port
* @return addr
*/
public static String getAddr(String host, int port) {
return String.format("%s:%d", host, port);
}
/**
* get addr like host:port
*
* @param port port
* @return addr
*/
public static String getAddr(int port) {
return getAddr(getHost(), port);
}
public static String getHost() {
if (HOST_ADDRESS != null) {
return HOST_ADDRESS;
}
InetAddress address = getLocalAddress();
if (address != null) {
HOST_ADDRESS = Constants.KUBERNETES_MODE ? address.getHostName() : address.getHostAddress();
return HOST_ADDRESS;
}
return Constants.KUBERNETES_MODE ? "localhost" : "127.0.0.1";
}
private static InetAddress getLocalAddress() {
if (null != LOCAL_ADDRESS) {
return LOCAL_ADDRESS;
}
return getLocalAddress0();
}
/**
* Find first valid IP from local network card
*
* @return first valid local IP
*/
private static synchronized InetAddress getLocalAddress0() {
if (null != LOCAL_ADDRESS) {
return LOCAL_ADDRESS;
}
InetAddress localAddress = null;
try {
NetworkInterface networkInterface = findNetworkInterface();
if (networkInterface != null) {
Enumeration<InetAddress> addresses = networkInterface.getInetAddresses();
while (addresses.hasMoreElements()) {
Optional<InetAddress> addressOp = toValidAddress(addresses.nextElement());
if (addressOp.isPresent()) {
try {
if (addressOp.get().isReachable(100)) {
LOCAL_ADDRESS = addressOp.get();
return LOCAL_ADDRESS;
}
} catch (IOException e) {
logger.warn("test address id reachable io exception", e);
}
}
}
}
localAddress = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
logger.warn("InetAddress get LocalHost exception", e);
}
Optional<InetAddress> addressOp = toValidAddress(localAddress);
if (addressOp.isPresent()) {
LOCAL_ADDRESS = addressOp.get();
}
return LOCAL_ADDRESS;
}
private static Optional<InetAddress> toValidAddress(InetAddress address) {
if (address instanceof Inet6Address) {
Inet6Address v6Address = (Inet6Address) address;
if (isPreferIPV6Address()) {
return Optional.ofNullable(normalizeV6Address(v6Address));
}
}
if (isValidV4Address(address)) {
return Optional.of(address);
}
return Optional.empty();
}
private static InetAddress normalizeV6Address(Inet6Address address) {
String addr = address.getHostAddress();
int i = addr.lastIndexOf('%');
if (i > 0) {
try {
return InetAddress.getByName(addr.substring(0, i) + '%' + address.getScopeId());
} catch (UnknownHostException e) {
logger.debug("Unknown IPV6 address: ", e);
}
}
return address;
}
public static boolean isValidV4Address(InetAddress address) {
if (address == null || address.isLoopbackAddress()) {
return false;
}
String name = address.getHostAddress();
return (name != null
&& IP_PATTERN.matcher(name).matches()
&& !address.isAnyLocalAddress()
&& !address.isLoopbackAddress());
}
/**
* Check whether IPv6 addresses are preferred
*
* @return true if the system property java.net.preferIPv6Addresses is set to true
*/
private static boolean isPreferIPV6Address() {
return Boolean.getBoolean("java.net.preferIPv6Addresses");
}
/**
* Get the suitable {@link NetworkInterface}
*
* @return If no {@link NetworkInterface} is available , return <code>null</code>
*/
private static NetworkInterface findNetworkInterface() {
List<NetworkInterface> validNetworkInterfaces = emptyList();
try {
validNetworkInterfaces = getValidNetworkInterfaces();
} catch (SocketException e) {
logger.warn("ValidNetworkInterfaces exception", e);
}
NetworkInterface result = null;
// Try to specify config NetWork Interface
for (NetworkInterface networkInterface : validNetworkInterfaces) {
if (isSpecifyNetworkInterface(networkInterface)) {
result = networkInterface;
break;
}
}
if (null != result) {
return result;
}
return findAddress(validNetworkInterfaces);
}
/**
* Get the valid {@link NetworkInterface network interfaces}
*
* @throws SocketException SocketException if an I/O error occurs.
*/
private static List<NetworkInterface> getValidNetworkInterfaces() throws SocketException {
List<NetworkInterface> validNetworkInterfaces = new LinkedList<>();
Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
while (interfaces.hasMoreElements()) {
NetworkInterface networkInterface = interfaces.nextElement();
if (ignoreNetworkInterface(networkInterface)) { // ignore
continue;
}
validNetworkInterfaces.add(networkInterface);
}
return validNetworkInterfaces;
}
/**
* @param networkInterface {@link NetworkInterface}
* @return if the specified {@link NetworkInterface} should be ignored, return <code>true</code>
* @throws SocketException SocketException if an I/O error occurs.
*/
public static boolean ignoreNetworkInterface(NetworkInterface networkInterface) throws SocketException {
return networkInterface == null
|| networkInterface.isLoopback()
|| networkInterface.isVirtual()
|| !networkInterface.isUp();
}
private static boolean isSpecifyNetworkInterface(NetworkInterface networkInterface) {
String preferredNetworkInterface = System.getProperty(DOLPHIN_SCHEDULER_PREFERRED_NETWORK_INTERFACE);
return Objects.equals(networkInterface.getDisplayName(), preferredNetworkInterface);
}
private static NetworkInterface findAddress(List<NetworkInterface> validNetworkInterfaces) {
if (validNetworkInterfaces.isEmpty()) {
return null;
}
String networkPriority = PropertyUtils.getString(Constants.NETWORK_PRIORITY_STRATEGY, NETWORK_PRIORITY_DEFAULT);
if (NETWORK_PRIORITY_DEFAULT.equalsIgnoreCase(networkPriority)) {
return findAddressByDefaultPolicy(validNetworkInterfaces);
} else if (NETWORK_PRIORITY_INNER.equalsIgnoreCase(networkPriority)) {
return findInnerAddress(validNetworkInterfaces);
} else if (NETWORK_PRIORITY_OUTER.equalsIgnoreCase(networkPriority)) {
return findOuterAddress(validNetworkInterfaces);
} else {
logger.error("There is no matching network card acquisition policy!");
return null;
}
}
private static NetworkInterface findAddressByDefaultPolicy(List<NetworkInterface> validNetworkInterfaces) {
NetworkInterface networkInterface;
networkInterface = findInnerAddress(validNetworkInterfaces);
if (networkInterface == null) {
networkInterface = findOuterAddress(validNetworkInterfaces);
if (networkInterface == null) {
networkInterface = validNetworkInterfaces.get(0);
}
}
return networkInterface;
}
/**
* Get the Intranet IP
*
* @return If no {@link NetworkInterface} is available , return <code>null</code>
*/
private static NetworkInterface findInnerAddress(List<NetworkInterface> validNetworkInterfaces) {
NetworkInterface networkInterface = null;
for (NetworkInterface ni : validNetworkInterfaces) {
Enumeration<InetAddress> address = ni.getInetAddresses();
while (address.hasMoreElements()) {
InetAddress ip = address.nextElement();
if (ip.isSiteLocalAddress()
&& !ip.isLoopbackAddress()) {
networkInterface = ni;
}
}
}
return networkInterface;
}
private static NetworkInterface findOuterAddress(List<NetworkInterface> validNetworkInterfaces) {
NetworkInterface networkInterface = null;
for (NetworkInterface ni : validNetworkInterfaces) {
Enumeration<InetAddress> address = ni.getInetAddresses();
while (address.hasMoreElements()) {
InetAddress ip = address.nextElement();
if (!ip.isSiteLocalAddress()
&& !ip.isLoopbackAddress()) {
networkInterface = ni;
}
}
}
return networkInterface;
}
}
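/*
 * Illustrative usage sketch (not part of the original source): a server building its
 * registry address; the port value here is a placeholder.
 *
 *   String addr = NetUtils.getAddr(5678);
 *   // yields e.g. "192.168.1.10:5678", or "<pod-hostname>:5678" when KUBERNETES_MODE is set
 */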
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,298 | [Bug][k8s]when recreate or upgrade helm release, the host ip changed | **Describe the bug**
When DS is deployed to Kubernetes by Helm, upgrading or recreating the release changes the IP of the worker server and the other servers, so when you query the log of a task that ran before the change, you get an empty log.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. deploy the cluster using Helm
2. recreate the deployed release or just upgrade it
3. query a task which ran before the upgrade/recreate
4. no log is shown
**Expected behavior**
The log should be shown.
**Screenshots**
![image](https://user-images.githubusercontent.com/3021207/88361996-915c4100-cdad-11ea-8e45-1c46b69109ce.png)
**Which version of Dolphin Scheduler:**
-[1.3.1-release]
**Additional context**
**Requirement or improvement**
| https://github.com/apache/dolphinscheduler/issues/3298 | https://github.com/apache/dolphinscheduler/pull/4786 | 23591b71a2859021e9280899b1f1b6ff3b679fbc | 8ebddc1f5bb484a2d7d8e1353b2a8f51fae78ec0 | "2020-07-24T05:00:07Z" | java | "2021-02-18T03:41:58Z" | dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/utils/ChannelUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.remote.utils;
import java.net.InetSocketAddress;
import io.netty.channel.Channel;
/**
* channel utils
*/
public class ChannelUtils {
private ChannelUtils() {
throw new IllegalStateException(ChannelUtils.class.getName());
}
/**
* get local address
*
* @param channel channel
* @return local address
*/
public static String getLocalAddress(Channel channel) {
return ((InetSocketAddress) channel.localAddress()).getAddress().getHostAddress();
}
/**
* get remote address
*
* @param channel channel
* @return remote address
*/
public static String getRemoteAddress(Channel channel) {
return ((InetSocketAddress) channel.remoteAddress()).getAddress().getHostAddress();
}
/**
* channel to address
*
* @param channel channel
* @return address
*/
public static Host toAddress(Channel channel) {
InetSocketAddress socketAddress = ((InetSocketAddress) channel.remoteAddress());
return new Host(socketAddress.getAddress().getHostAddress(), socketAddress.getPort());
}
}
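/*
 * Illustrative usage sketch (not part of the original source): resolving the remote
 * peer of an active Netty channel inside a server-side handler; `channel` is assumed
 * to be an established io.netty.channel.Channel.
 *
 *   Host remote = ChannelUtils.toAddress(channel);
 *   logger.info("received request from {}:{}", remote.getIp(), remote.getPort());
 */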
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,298 | [Bug][k8s]when recreate or upgrade helm release, the host ip changed | **Describe the bug**
When DS is deployed to Kubernetes by Helm, upgrading or recreating the release changes the IP of the worker server and the other servers, so when you query the log of a task that ran before the change, you get an empty log.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. deploy the cluster using Helm
2. recreate the deployed release or just upgrade it
3. query a task which ran before the upgrade/recreate
4. no log is shown
**Expected behavior**
The log should be shown.
**Screenshots**
![image](https://user-images.githubusercontent.com/3021207/88361996-915c4100-cdad-11ea-8e45-1c46b69109ce.png)
**Which version of Dolphin Scheduler:**
-[1.3.1-release]
**Additional context**
**Requirement or improvement**
| https://github.com/apache/dolphinscheduler/issues/3298 | https://github.com/apache/dolphinscheduler/pull/4786 | 23591b71a2859021e9280899b1f1b6ff3b679fbc | 8ebddc1f5bb484a2d7d8e1353b2a8f51fae78ec0 | "2020-07-24T05:00:07Z" | java | "2021-02-18T03:41:58Z" | dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.master.registry;
import static org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH;
import org.apache.dolphinscheduler.remote.utils.Constants;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
/**
* master registry test
*/
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = {SpringZKServer.class, MasterRegistry.class, ZookeeperRegistryCenter.class,
MasterConfig.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, CuratorZookeeperClient.class})
public class MasterRegistryTest {
@Autowired
private MasterRegistry masterRegistry;
@Autowired
private ZookeeperRegistryCenter zookeeperRegistryCenter;
@Autowired
private MasterConfig masterConfig;
@Test
public void testRegistry() throws InterruptedException {
masterRegistry.registry();
String masterPath = zookeeperRegistryCenter.getMasterPath();
TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); // wait for the heartbeat info to be written into the zk node
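// the test expects a registry node named "<local address>:<listen port>" under the master path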
String masterNodePath = masterPath + "/" + (Constants.LOCAL_ADDRESS + ":" + masterConfig.getListenPort());
String heartbeat = zookeeperRegistryCenter.getZookeeperCachedOperator().get(masterNodePath);
Assert.assertEquals(HEARTBEAT_FOR_ZOOKEEPER_INFO_LENGTH, heartbeat.split(",").length);
masterRegistry.unRegistry();
}
@Test
public void testUnRegistry() throws InterruptedException {
masterRegistry.init();
masterRegistry.registry();
TimeUnit.SECONDS.sleep(masterConfig.getMasterHeartbeatInterval() + 2); // wait for the heartbeat info to be written into the zk node
masterRegistry.unRegistry();
String masterPath = zookeeperRegistryCenter.getMasterPath();
List<String> childrenKeys = zookeeperRegistryCenter.getZookeeperCachedOperator().getChildrenKeys(masterPath);
Assert.assertTrue(childrenKeys.isEmpty());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services to make the service module clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service layer will help contributors and users read the code and locate functions.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interfaces, as sketched below.
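A minimal sketch of the proposed split (the signature is simplified for illustration and is not the final API):
```java
import java.util.HashMap;
import java.util.Map;

import org.springframework.stereotype.Service;

// Controllers depend on the interface; Spring wires in the @Service implementation.
public interface TenantService {

    Map<String, Object> createTenant(String tenantCode, int queueId, String desc);
}

@Service
class TenantServiceImpl implements TenantService {

    @Override
    public Map<String, Object> createTenant(String tenantCode, int queueId, String desc) {
        Map<String, Object> result = new HashMap<>();
        // the existing implementation body would move here unchanged
        result.put("status", "SUCCESS");
        return result;
    }
}
```
Callers then program against the interface, which also makes the service layer easy to mock in unit tests.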
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ExecutorController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.CHECK_PROCESS_DEFINITION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.EXECUTE_PROCESS_INSTANCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.START_PROCESS_INSTANCE_ERROR;
import org.apache.dolphinscheduler.api.enums.ExecuteType;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import java.text.ParseException;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
/**
* execute process controller
*/
@Api(tags = "PROCESS_INSTANCE_EXECUTOR_TAG", position = 1)
@RestController
@RequestMapping("projects/{projectName}/executors")
public class ExecutorController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ExecutorController.class);
@Autowired
private ExecutorService execService;
/**
* execute process instance
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @param scheduleTime schedule time
* @param failureStrategy failure strategy
* @param startNodeList start nodes list
* @param taskDependType task depend type
* @param execType execute type
* @param warningType warning type
* @param warningGroupId warning group id
* @param runMode run mode
* @param processInstancePriority process instance priority
* @param workerGroup worker group
* @param timeout timeout
* @return start process result code
*/
@ApiOperation(value = "startProcessInstance", notes = "RUN_PROCESS_INSTANCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "scheduleTime", value = "SCHEDULE_TIME", required = true, dataType = "String"),
@ApiImplicitParam(name = "failureStrategy", value = "FAILURE_STRATEGY", required = true, dataType = "FailureStrategy"),
@ApiImplicitParam(name = "startNodeList", value = "START_NODE_LIST", dataType = "String"),
@ApiImplicitParam(name = "taskDependType", value = "TASK_DEPEND_TYPE", dataType = "TaskDependType"),
@ApiImplicitParam(name = "execType", value = "COMMAND_TYPE", dataType = "CommandType"),
@ApiImplicitParam(name = "warningType", value = "WARNING_TYPE", required = true, dataType = "WarningType"),
@ApiImplicitParam(name = "warningGroupId", value = "WARNING_GROUP_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "runMode", value = "RUN_MODE", dataType = "RunMode"),
@ApiImplicitParam(name = "processInstancePriority", value = "PROCESS_INSTANCE_PRIORITY", required = true, dataType = "Priority"),
@ApiImplicitParam(name = "workerGroup", value = "WORKER_GROUP", dataType = "String", example = "default"),
@ApiImplicitParam(name = "timeout", value = "TIMEOUT", dataType = "Int", example = "100"),
})
@PostMapping(value = "start-process-instance")
@ResponseStatus(HttpStatus.OK)
@ApiException(START_PROCESS_INSTANCE_ERROR)
public Result startProcessInstance(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam(value = "processDefinitionId") int processDefinitionId,
@RequestParam(value = "scheduleTime", required = false) String scheduleTime,
@RequestParam(value = "failureStrategy", required = true) FailureStrategy failureStrategy,
@RequestParam(value = "startNodeList", required = false) String startNodeList,
@RequestParam(value = "taskDependType", required = false) TaskDependType taskDependType,
@RequestParam(value = "execType", required = false) CommandType execType,
@RequestParam(value = "warningType", required = true) WarningType warningType,
@RequestParam(value = "warningGroupId", required = false) int warningGroupId,
@RequestParam(value = "runMode", required = false) RunMode runMode,
@RequestParam(value = "processInstancePriority", required = false) Priority processInstancePriority,
@RequestParam(value = "workerGroup", required = false, defaultValue = "default") String workerGroup,
@RequestParam(value = "timeout", required = false) Integer timeout,
@RequestParam(value = "startParams", required = false) String startParams) throws ParseException {
logger.info("login user {}, start process instance, project name: {}, process definition id: {}, schedule time: {}, "
+ "failure policy: {}, node name: {}, node dep: {}, notify type: {}, "
+ "notify group id: {}, run mode: {},process instance priority:{}, workerGroup: {}, timeout: {}, startParams: {} ",
loginUser.getUserName(), projectName, processDefinitionId, scheduleTime,
failureStrategy, startNodeList, taskDependType, warningType, workerGroup, runMode, processInstancePriority,
workerGroup, timeout, startParams);
if (timeout == null) {
timeout = Constants.MAX_TASK_TIMEOUT;
}
Map<String, String> startParamMap = null;
if (startParams != null) {
startParamMap = JSONUtils.toMap(startParams);
}
Map<String, Object> result = execService.execProcessInstance(loginUser, projectName, processDefinitionId, scheduleTime, execType, failureStrategy,
startNodeList, taskDependType, warningType,
warningGroupId, runMode, processInstancePriority, workerGroup, timeout, startParamMap);
return returnDataList(result);
}
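    // A typical request (illustrative values only, not a prescribed call):
    //   POST /projects/demo/executors/start-process-instance
    //     processDefinitionId=100&failureStrategy=CONTINUE&warningType=NONE
    //     &warningGroupId=0&processInstancePriority=MEDIUM&workerGroup=default
    // scheduleTime may be omitted for an immediate run.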
/**
     * do action to a process instance: pause, stop, repeat, recover from pause, recover from stop
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param executeType execute type
* @return execute result code
*/
@ApiOperation(value = "execute", notes = "EXECUTE_ACTION_TO_PROCESS_INSTANCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processInstanceId", value = "PROCESS_INSTANCE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "executeType", value = "EXECUTE_TYPE", required = true, dataType = "ExecuteType")
})
@PostMapping(value = "/execute")
@ResponseStatus(HttpStatus.OK)
@ApiException(EXECUTE_PROCESS_INSTANCE_ERROR)
public Result execute(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@ApiParam(name = "projectName", value = "PROJECT_NAME", required = true) @PathVariable String projectName,
@RequestParam("processInstanceId") Integer processInstanceId,
@RequestParam("executeType") ExecuteType executeType
) {
logger.info("execute command, login user: {}, project:{}, process instance id:{}, execute type:{}",
loginUser.getUserName(), projectName, processInstanceId, executeType);
Map<String, Object> result = execService.execute(loginUser, projectName, processInstanceId, executeType);
return returnDataList(result);
}
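    // executeType selects the action, e.g. PAUSE, STOP or REPEAT_RUNNING
    // (see the ExecuteType enum for the authoritative list of values).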
/**
* check process definition and all of the son process definitions is on line.
*
* @param loginUser login user
* @param processDefinitionId process definition id
* @return check result code
*/
@ApiOperation(value = "startCheckProcessDefinition", notes = "START_CHECK_PROCESS_DEFINITION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "processDefinitionId", value = "PROCESS_DEFINITION_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/start-check")
@ResponseStatus(HttpStatus.OK)
@ApiException(CHECK_PROCESS_DEFINITION_ERROR)
public Result startCheckProcessDefinition(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "processDefinitionId") int processDefinitionId) {
logger.info("login user {}, check process definition {}", loginUser.getUserName(), processDefinitionId);
Map<String, Object> result = execService.startCheckByProcessDefinedId(processDefinitionId);
return returnDataList(result);
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services to make the service module clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service layer will help contributors and users read the code and locate functions.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interfaces.
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/controller/ResourcesController.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.controller;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_FILE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZED_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.AUTHORIZE_RESOURCE_TREE;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.CREATE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DELETE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.DOWNLOAD_RESOURCE_FILE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.EDIT_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_DATASOURCE_BY_TYPE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_RESOURCES_LIST_PAGING;
import static org.apache.dolphinscheduler.api.enums.Status.QUERY_UDF_FUNCTION_LIST_PAGING_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_FILE_IS_EMPTY;
import static org.apache.dolphinscheduler.api.enums.Status.RESOURCE_NOT_EXIST;
import static org.apache.dolphinscheduler.api.enums.Status.UNAUTHORIZED_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_RESOURCE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.UPDATE_UDF_FUNCTION_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VERIFY_UDF_FUNCTION_NAME_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VIEW_RESOURCE_FILE_ON_LINE_ERROR;
import static org.apache.dolphinscheduler.api.enums.Status.VIEW_UDF_FUNCTION_ERROR;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ApiException;
import org.apache.dolphinscheduler.api.service.ResourcesService;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ProgramType;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import springfox.documentation.annotations.ApiIgnore;
/**
* resources controller
*/
@Api(tags = "RESOURCES_TAG", position = 1)
@RestController
@RequestMapping("resources")
public class ResourcesController extends BaseController {
private static final Logger logger = LoggerFactory.getLogger(ResourcesController.class);
@Autowired
private ResourcesService resourceService;
@Autowired
private UdfFuncService udfFuncService;
/**
*
* @param loginUser login user
* @param type type
* @param alias alias
* @param description description
* @param pid parent id
* @param currentDir current directory
* @return create result code
*/
@ApiOperation(value = "createDirctory", notes = "CREATE_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
@ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"),
@ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String")
})
@PostMapping(value = "/directory/create")
@ApiException(CREATE_RESOURCE_ERROR)
public Result createDirectory(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type,
@RequestParam(value = "name") String alias,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "pid") int pid,
@RequestParam(value = "currentDir") String currentDir) {
logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}",
loginUser.getUserName(), type, alias, description, pid, currentDir);
return resourceService.createDirectory(loginUser, alias, description, type, pid, currentDir);
}
/**
* create resource
* @param loginUser
* @param type
* @param alias
* @param description
* @param file
* @param pid
* @param currentDir
* @return create result code
*/
@ApiOperation(value = "createResource", notes = "CREATE_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
@ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile"),
@ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"),
@ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String")
})
@PostMapping(value = "/create")
@ApiException(CREATE_RESOURCE_ERROR)
public Result createResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type,
@RequestParam(value = "name") String alias,
@RequestParam(value = "description", required = false) String description,
@RequestParam("file") MultipartFile file,
@RequestParam(value = "pid") int pid,
@RequestParam(value = "currentDir") String currentDir) {
logger.info("login user {}, create resource, type: {}, resource alias: {}, desc: {}, file: {},{}",
loginUser.getUserName(), type, alias, description, file.getName(), file.getOriginalFilename());
return resourceService.createResource(loginUser, alias, description, type, file, pid, currentDir);
}
/**
* update resource
*
* @param loginUser login user
* @param alias alias
* @param resourceId resource id
* @param type resource type
* @param description description
* @param file resource file
* @return update result code
*/
@ApiOperation(value = "updateResource", notes = "UPDATE_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "name", value = "RESOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
@ApiImplicitParam(name = "file", value = "RESOURCE_FILE", required = true, dataType = "MultipartFile")
})
@PostMapping(value = "/update")
@ApiException(UPDATE_RESOURCE_ERROR)
public Result updateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int resourceId,
@RequestParam(value = "type") ResourceType type,
@RequestParam(value = "name") String alias,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "file" ,required = false) MultipartFile file) {
logger.info("login user {}, update resource, type: {}, resource alias: {}, desc: {}, file: {}",
loginUser.getUserName(), type, alias, description, file);
return resourceService.updateResource(loginUser, resourceId, alias, description, type, file);
}
/**
* query resources list
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@ApiOperation(value = "queryResourceList", notes = "QUERY_RESOURCE_LIST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType")
})
@GetMapping(value = "/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_RESOURCES_LIST_ERROR)
public Result queryResourceList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type
) {
logger.info("query resource list, login user:{}, resource type:{}", loginUser.getUserName(), type);
Map<String, Object> result = resourceService.queryResourceList(loginUser, type);
return returnDataList(result);
}
/**
* query resources list paging
*
* @param loginUser login user
* @param type resource type
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return resource list page
*/
@ApiOperation(value = "queryResourceListPaging", notes = "QUERY_RESOURCE_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "int", example = "10"),
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value = "/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_RESOURCES_LIST_PAGING)
public Result queryResourceListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type,
@RequestParam(value = "id") int id,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize
) {
logger.info("query resource list, login user:{}, resource type:{}, search value:{}",
loginUser.getUserName(), type, searchVal);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}
searchVal = ParameterUtils.handleEscapes(searchVal);
result = resourceService.queryResourceListPaging(loginUser, id, type, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* delete resource
*
* @param loginUser login user
* @param resourceId resource id
* @return delete result code
*/
@ApiOperation(value = "deleteResource", notes = "DELETE_RESOURCE_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_RESOURCE_ERROR)
public Result deleteResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int resourceId
) throws Exception {
logger.info("login user {}, delete resource id: {}",
loginUser.getUserName(), resourceId);
return resourceService.delete(loginUser, resourceId);
}
/**
* verify resource by alias and type
*
* @param loginUser login user
* @param fullName resource full name
* @param type resource type
* @return true if the resource name not exists, otherwise return false
*/
@ApiOperation(value = "verifyResourceName", notes = "VERIFY_RESOURCE_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String")
})
@GetMapping(value = "/verify-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_RESOURCE_BY_NAME_AND_TYPE_ERROR)
public Result verifyResourceName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "fullName") String fullName,
@RequestParam(value = "type") ResourceType type
) {
logger.info("login user {}, verfiy resource alias: {},resource type: {}",
loginUser.getUserName(), fullName, type);
return resourceService.verifyResourceName(fullName, type, loginUser);
}
/**
* query resources jar list
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@ApiOperation(value = "queryResourceByProgramType", notes = "QUERY_RESOURCE_LIST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType")
})
@GetMapping(value = "/list/jar")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_RESOURCES_LIST_ERROR)
public Result queryResourceJarList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type,
@RequestParam(value = "programType",required = false) ProgramType programType
) {
String programTypeName = programType == null ? "" : programType.name();
String userName = loginUser.getUserName();
        userName = userName.replaceAll("[\n\r\t]", "_");
        logger.info("query resource list, login user:{}, resource type:{}, program type:{}", userName, type, programTypeName);
Map<String, Object> result = resourceService.queryResourceByProgramType(loginUser, type,programType);
return returnDataList(result);
}
/**
* query resource by full name and type
*
* @param loginUser login user
* @param fullName resource full name
* @param type resource type
* @param id resource id
* @return true if the resource name not exists, otherwise return false
*/
@ApiOperation(value = "queryResource", notes = "QUERY_BY_RESOURCE_NAME")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "fullName", value = "RESOURCE_FULL_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = false, dataType = "Int", example = "10")
})
@GetMapping(value = "/queryResource")
@ResponseStatus(HttpStatus.OK)
@ApiException(RESOURCE_NOT_EXIST)
public Result queryResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "fullName", required = false) String fullName,
@RequestParam(value = "id", required = false) Integer id,
@RequestParam(value = "type") ResourceType type
) {
logger.info("login user {}, query resource by full name: {} or id: {},resource type: {}",
loginUser.getUserName(), fullName, id, type);
return resourceService.queryResource(fullName, id, type);
}
/**
* view resource file online
*
* @param loginUser login user
* @param resourceId resource id
* @param skipLineNum skip line number
* @param limit limit
* @return resource content
*/
@ApiOperation(value = "viewResource", notes = "VIEW_RESOURCE_BY_ID_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "skipLineNum", value = "SKIP_LINE_NUM", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "limit", value = "LIMIT", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/view")
@ApiException(VIEW_RESOURCE_FILE_ON_LINE_ERROR)
public Result viewResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int resourceId,
@RequestParam(value = "skipLineNum") int skipLineNum,
@RequestParam(value = "limit") int limit
) {
logger.info("login user {}, view resource : {}, skipLineNum {} , limit {}",
loginUser.getUserName(), resourceId, skipLineNum, limit);
return resourceService.readResource(resourceId, skipLineNum, limit);
}
/**
* create resource file online
* @param loginUser
* @param type
* @param fileName
* @param fileSuffix
* @param description
* @param content
* @param pid
* @param currentDir
* @return create result code
*/
@ApiOperation(value = "onlineCreateResource", notes = "ONLINE_CREATE_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "RESOURCE_TYPE", required = true, dataType = "ResourceType"),
@ApiImplicitParam(name = "fileName", value = "RESOURCE_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "suffix", value = "SUFFIX", required = true, dataType = "String"),
@ApiImplicitParam(name = "description", value = "RESOURCE_DESC", dataType = "String"),
@ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String"),
@ApiImplicitParam(name = "pid", value = "RESOURCE_PID", required = true, dataType = "Int", example = "10"),
@ApiImplicitParam(name = "currentDir", value = "RESOURCE_CURRENTDIR", required = true, dataType = "String")
})
@PostMapping(value = "/online-create")
@ApiException(CREATE_RESOURCE_FILE_ON_LINE_ERROR)
public Result onlineCreateResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") ResourceType type,
@RequestParam(value = "fileName") String fileName,
@RequestParam(value = "suffix") String fileSuffix,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "content") String content,
@RequestParam(value = "pid") int pid,
@RequestParam(value = "currentDir") String currentDir
) {
logger.info("login user {}, online create resource! fileName : {}, type : {}, suffix : {},desc : {},content : {}",
loginUser.getUserName(), fileName, type, fileSuffix, description, content, pid, currentDir);
if (StringUtils.isEmpty(content)) {
logger.error("resource file contents are not allowed to be empty");
return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg());
}
return resourceService.onlineCreateResource(loginUser, type, fileName, fileSuffix, description, content, pid, currentDir);
}
/**
* edit resource file online
*
* @param loginUser login user
* @param resourceId resource id
* @param content content
* @return update result code
*/
@ApiOperation(value = "updateResourceContent", notes = "UPDATE_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100"),
@ApiImplicitParam(name = "content", value = "CONTENT", required = true, dataType = "String")
})
@PostMapping(value = "/update-content")
@ApiException(EDIT_RESOURCE_FILE_ON_LINE_ERROR)
public Result updateResourceContent(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int resourceId,
@RequestParam(value = "content") String content
) {
logger.info("login user {}, updateProcessInstance resource : {}",
loginUser.getUserName(), resourceId);
if (StringUtils.isEmpty(content)) {
logger.error("The resource file contents are not allowed to be empty");
return error(Status.RESOURCE_FILE_IS_EMPTY.getCode(), RESOURCE_FILE_IS_EMPTY.getMsg());
}
return resourceService.updateResourceContent(resourceId, content);
}
/**
* download resource file
*
* @param loginUser login user
* @param resourceId resource id
* @return resource content
*/
@ApiOperation(value = "downloadResource", notes = "DOWNLOAD_RESOURCE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/download")
@ResponseBody
@ApiException(DOWNLOAD_RESOURCE_FILE_ERROR)
public ResponseEntity downloadResource(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int resourceId) throws Exception {
logger.info("login user {}, download resource : {}",
loginUser.getUserName(), resourceId);
Resource file = resourceService.downloadResource(resourceId);
if (file == null) {
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(Status.RESOURCE_NOT_EXIST.getMsg());
}
return ResponseEntity
.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + file.getFilename() + "\"")
.body(file);
}
/**
* create udf function
*
* @param loginUser login user
* @param type udf type
* @param funcName function name
* @param argTypes argument types
* @param database database
* @param description description
* @param className class name
* @param resourceId resource id
* @return create result code
*/
@ApiOperation(value = "createUdfFunc", notes = "CREATE_UDF_FUNCTION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"),
@ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "suffix", value = "CLASS_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"),
@ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"),
@ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/udf-func/create")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(CREATE_UDF_FUNCTION_ERROR)
public Result createUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "type") UdfType type,
@RequestParam(value = "funcName") String funcName,
@RequestParam(value = "className") String className,
@RequestParam(value = "argTypes", required = false) String argTypes,
@RequestParam(value = "database", required = false) String database,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "resourceId") int resourceId) {
logger.info("login user {}, create udf function, type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}",
loginUser.getUserName(), type, funcName, argTypes, database, description, resourceId);
return udfFuncService.createUdfFunction(loginUser, funcName, className, argTypes, database, description, type, resourceId);
}
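    // argTypes above is expected as a comma-separated list such as "string,int"
    // (assumed format for illustration; see the UdfFunc entity for the authoritative contract).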
/**
* view udf function
*
* @param loginUser login user
* @param id resource id
* @return udf function detail
*/
@ApiOperation(value = "viewUIUdfFunction", notes = "VIEW_UDF_FUNCTION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/udf-func/update-ui")
@ResponseStatus(HttpStatus.OK)
@ApiException(VIEW_UDF_FUNCTION_ERROR)
public Result viewUIUdfFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("id") int id) {
logger.info("login user {}, query udf{}",
loginUser.getUserName(), id);
Map<String, Object> map = udfFuncService.queryUdfFuncDetail(id);
return returnDataList(map);
}
/**
* update udf function
*
* @param loginUser login user
* @param type resource type
* @param funcName function name
* @param argTypes argument types
* @param database data base
* @param description description
* @param resourceId resource id
* @param className class name
* @param udfFuncId udf function id
* @return update result code
*/
@ApiOperation(value = "updateUdfFunc", notes = "UPDATE_UDF_FUNCTION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "UDF_ID", required = true, dataType = "Int"),
@ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType"),
@ApiImplicitParam(name = "funcName", value = "FUNC_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "className", value = "CLASS_NAME", required = true, dataType = "String"),
@ApiImplicitParam(name = "argTypes", value = "ARG_TYPES", dataType = "String"),
@ApiImplicitParam(name = "database", value = "DATABASE_NAME", dataType = "String"),
@ApiImplicitParam(name = "description", value = "UDF_DESC", dataType = "String"),
@ApiImplicitParam(name = "resourceId", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
})
@PostMapping(value = "/udf-func/update")
@ApiException(UPDATE_UDF_FUNCTION_ERROR)
public Result updateUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int udfFuncId,
@RequestParam(value = "type") UdfType type,
@RequestParam(value = "funcName") String funcName,
@RequestParam(value = "className") String className,
@RequestParam(value = "argTypes", required = false) String argTypes,
@RequestParam(value = "database", required = false) String database,
@RequestParam(value = "description", required = false) String description,
@RequestParam(value = "resourceId") int resourceId) {
logger.info("login user {}, updateProcessInstance udf function id: {},type: {}, funcName: {},argTypes: {} ,database: {},desc: {},resourceId: {}",
loginUser.getUserName(), udfFuncId, type, funcName, argTypes, database, description, resourceId);
Map<String, Object> result = udfFuncService.updateUdfFunc(udfFuncId, funcName, className, argTypes, database, description, type, resourceId);
return returnDataList(result);
}
/**
* query udf function list paging
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return udf function list page
*/
@ApiOperation(value = "queryUdfFuncListPaging", notes = "QUERY_UDF_FUNCTION_LIST_PAGING_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "searchVal", value = "SEARCH_VAL", dataType = "String"),
@ApiImplicitParam(name = "pageNo", value = "PAGE_NO", dataType = "Int", example = "1"),
@ApiImplicitParam(name = "pageSize", value = "PAGE_SIZE", dataType = "Int", example = "20")
})
@GetMapping(value = "/udf-func/list-paging")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_UDF_FUNCTION_LIST_PAGING_ERROR)
public Result<Object> queryUdfFuncListPaging(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("pageNo") Integer pageNo,
@RequestParam(value = "searchVal", required = false) String searchVal,
@RequestParam("pageSize") Integer pageSize
) {
logger.info("query udf functions list, login user:{},search value:{}",
loginUser.getUserName(), searchVal);
Map<String, Object> result = checkPageParams(pageNo, pageSize);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return returnDataListPaging(result);
}
result = udfFuncService.queryUdfFuncListPaging(loginUser, searchVal, pageNo, pageSize);
return returnDataListPaging(result);
}
/**
* query udf func list by type
*
* @param loginUser login user
* @param type resource type
* @return resource list
*/
@ApiOperation(value = "queryUdfFuncList", notes = "QUERY_UDF_FUNC_LIST_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "type", value = "UDF_TYPE", required = true, dataType = "UdfType")
})
@GetMapping(value = "/udf-func/list")
@ResponseStatus(HttpStatus.OK)
@ApiException(QUERY_DATASOURCE_BY_TYPE_ERROR)
public Result<Object> queryUdfFuncList(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("type") UdfType type) {
String userName = loginUser.getUserName();
        userName = userName.replaceAll("[\n\r\t]", "_");
logger.info("query udf func list, user:{}, type:{}", userName, type);
Map<String, Object> result = udfFuncService.queryUdfFuncList(loginUser, type.ordinal());
return returnDataList(result);
}
/**
* verify udf function name can use or not
*
* @param loginUser login user
* @param name name
* @return true if the name can user, otherwise return false
*/
@ApiOperation(value = "verifyUdfFuncName", notes = "VERIFY_UDF_FUNCTION_NAME_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "name", value = "FUNC_NAME", required = true, dataType = "String")
})
@GetMapping(value = "/udf-func/verify-name")
@ResponseStatus(HttpStatus.OK)
@ApiException(VERIFY_UDF_FUNCTION_NAME_ERROR)
public Result verifyUdfFuncName(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "name") String name
) {
logger.info("login user {}, verfiy udf function name: {}",
loginUser.getUserName(), name);
return udfFuncService.verifyUdfFuncByName(name);
}
/**
* delete udf function
*
* @param loginUser login user
* @param udfFuncId udf function id
* @return delete result code
*/
@ApiOperation(value = "deleteUdfFunc", notes = "DELETE_UDF_FUNCTION_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "id", value = "RESOURCE_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/udf-func/delete")
@ResponseStatus(HttpStatus.OK)
@ApiException(DELETE_UDF_FUNCTION_ERROR)
public Result deleteUdfFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam(value = "id") int udfFuncId
) {
logger.info("login user {}, delete udf function id: {}", loginUser.getUserName(), udfFuncId);
return udfFuncService.delete(udfFuncId);
}
/**
* authorized file resource list
*
* @param loginUser login user
* @param userId user id
* @return authorized result
*/
@ApiOperation(value = "authorizedFile", notes = "AUTHORIZED_FILE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/authed-file")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(AUTHORIZED_FILE_RESOURCE_ERROR)
public Result authorizedFile(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
logger.info("authorized file resource, user: {}, user id:{}", loginUser.getUserName(), userId);
Map<String, Object> result = resourceService.authorizedFile(loginUser, userId);
return returnDataList(result);
}
/**
* unauthorized file resource list
*
* @param loginUser login user
* @param userId user id
* @return unauthorized result code
*/
@ApiOperation(value = "authorizeResourceTree", notes = "AUTHORIZE_RESOURCE_TREE_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/authorize-resource-tree")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(AUTHORIZE_RESOURCE_TREE)
public Result authorizeResourceTree(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
logger.info("all resource file, user:{}, user id:{}", loginUser.getUserName(), userId);
Map<String, Object> result = resourceService.authorizeResourceTree(loginUser, userId);
return returnDataList(result);
}
/**
* unauthorized udf function
*
* @param loginUser login user
* @param userId user id
* @return unauthorized result code
*/
@ApiOperation(value = "unauthUDFFunc", notes = "UNAUTHORIZED_UDF_FUNC_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/unauth-udf-func")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(UNAUTHORIZED_UDF_FUNCTION_ERROR)
public Result unauthUDFFunc(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
logger.info("unauthorized udf function, login user:{}, unauthorized user id:{}", loginUser.getUserName(), userId);
Map<String, Object> result = resourceService.unauthorizedUDFFunction(loginUser, userId);
return returnDataList(result);
}
/**
* authorized udf function
*
* @param loginUser login user
* @param userId user id
* @return authorized result code
*/
@ApiOperation(value = "authUDFFunc", notes = "AUTHORIZED_UDF_FUNC_NOTES")
@ApiImplicitParams({
@ApiImplicitParam(name = "userId", value = "USER_ID", required = true, dataType = "Int", example = "100")
})
@GetMapping(value = "/authed-udf-func")
@ResponseStatus(HttpStatus.CREATED)
@ApiException(AUTHORIZED_UDF_FUNCTION_ERROR)
public Result authorizedUDFFunction(@ApiIgnore @RequestAttribute(value = Constants.SESSION_USER) User loginUser,
@RequestParam("userId") Integer userId) {
logger.info("auth udf function, login user:{}, auth user id:{}", loginUser.getUserName(), userId);
Map<String, Object> result = resourceService.authorizedUDFFunction(loginUser, userId);
return returnDataList(result);
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services to make the service module clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service layer will help contributors and users read the code and locate functions.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interfaces.
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataSourceService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.OracleDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* datasource service
*/
@Service
public class DataSourceService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(DataSourceService.class);
public static final String NAME = "name";
public static final String NOTE = "note";
public static final String TYPE = "type";
public static final String HOST = "host";
public static final String PORT = "port";
public static final String PRINCIPAL = "principal";
public static final String DATABASE = "database";
public static final String USER_NAME = "userName";
public static final String OTHER = "other";
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private DataSourceUserMapper datasourceUserMapper;
/**
* create data source
*
* @param loginUser login user
* @param name data source name
* @param desc data source description
* @param type data source type
* @param parameter datasource parameters
* @return create result code
*/
public Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {
Result<Object> result = new Result<>();
// check name can use or not
if (checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
Result<Object> isConnection = checkConnection(type, parameter);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
return result;
}
// build datasource
DataSource dataSource = new DataSource();
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserId(loginUser.getId());
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(parameter);
dataSource.setCreateTime(now);
dataSource.setUpdateTime(now);
dataSourceMapper.insert(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
/**
     * update datasource
*
* @param loginUser login user
* @param name data source name
* @param desc data source description
* @param type data source type
* @param parameter datasource parameters
* @param id data source id
* @return update result code
*/
public Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {
Result<Object> result = new Result<>();
// determine whether the data source exists
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
//check name can use or not
if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
putMsg(result, Status.DATASOURCE_EXIST);
return result;
}
        // check password; if the password is not updated, keep the old password.
ObjectNode paramObject = JSONUtils.parseObject(parameter);
String password = paramObject.path(Constants.PASSWORD).asText();
if (StringUtils.isBlank(password)) {
String oldConnectionParams = dataSource.getConnectionParams();
ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText());
}
// connectionParams json
String connectionParams = paramObject.toString();
Result<Object> isConnection = checkConnection(type, parameter);
if (Status.SUCCESS.getCode() != isConnection.getCode()) {
return result;
}
Date now = new Date();
dataSource.setName(name.trim());
dataSource.setNote(desc);
dataSource.setUserName(loginUser.getUserName());
dataSource.setType(type);
dataSource.setConnectionParams(connectionParams);
dataSource.setUpdateTime(now);
dataSourceMapper.updateById(dataSource);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkName(String name) {
List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
        return queryDataSource != null && !queryDataSource.isEmpty();
}
/**
     * query datasource detail
*
* @param id datasource id
* @return data source detail
*/
public Map<String, Object> queryDataSource(int id) {
Map<String, Object> result = new HashMap<String, Object>(5);
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
// type
String dataSourceType = dataSource.getType().toString();
// name
String dataSourceName = dataSource.getName();
// desc
String desc = dataSource.getNote();
// parameter
String parameter = dataSource.getConnectionParams();
BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
DbConnectType connectType = null;
        String hostSeparator = Constants.DOUBLE_SLASH;
if (DbType.ORACLE.equals(dataSource.getType())) {
connectType = ((OracleDataSource) datasourceForm).getConnectType();
if (DbConnectType.ORACLE_SID.equals(connectType)) {
                hostSeparator = Constants.AT_SIGN;
}
}
String database = datasourceForm.getDatabase();
// jdbc connection params
String other = datasourceForm.getOther();
String address = datasourceForm.getAddress();
        String[] hostsPorts = getHostsAndPort(address, hostSeparator);
// ip host
String host = hostsPorts[0];
        // port
String port = hostsPorts[1];
String separator = "";
switch (dataSource.getType()) {
case HIVE:
case SQLSERVER:
separator = ";";
break;
case MYSQL:
case POSTGRESQL:
case CLICKHOUSE:
case ORACLE:
case PRESTO:
separator = "&";
break;
default:
separator = "&";
break;
}
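        // "other" carries extra jdbc params flattened as key=value pairs joined by the
        // type-specific separator, e.g. "useUnicode=true&characterEncoding=UTF-8" for MySQL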
Map<String, String> otherMap = new LinkedHashMap<String, String>();
if (other != null) {
String[] configs = other.split(separator);
for (String config : configs) {
otherMap.put(config.split("=")[0], config.split("=")[1]);
}
}
Map<String, Object> map = new HashMap<>(10);
map.put(NAME, dataSourceName);
map.put(NOTE, desc);
map.put(TYPE, dataSourceType);
if (connectType != null) {
map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
map.put(HOST, host);
map.put(PORT, port);
map.put(PRINCIPAL, datasourceForm.getPrincipal());
map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf());
map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername());
map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath());
map.put(DATABASE, database);
map.put(USER_NAME, datasourceForm.getUser());
map.put(OTHER, otherMap);
result.put(Constants.DATA_LIST, map);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* query datasource list by keyword
*
* @param loginUser login user
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @return data source list page
*/
public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
Map<String, Object> result = new HashMap<>();
IPage<DataSource> dataSourceList = null;
Page<DataSource> dataSourcePage = new Page(pageNo, pageSize);
if (isAdmin(loginUser)) {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
} else {
dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
}
List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
handlePasswd(dataSources);
PageInfo<DataSource> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
pageInfo.setLists(dataSources);
result.put(Constants.DATA_LIST, pageInfo);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* handle datasource connection password for safety
*
* @param dataSourceList data source list
*/
private void handlePasswd(List<DataSource> dataSourceList) {
for (DataSource dataSource : dataSourceList) {
String connectionParams = dataSource.getConnectionParams();
ObjectNode object = JSONUtils.parseObject(connectionParams);
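// mask the real password with Constants.XXXXXX so it never leaves the service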
object.put(Constants.PASSWORD, Constants.XXXXXX);
dataSource.setConnectionParams(object.toString());
}
}
/**
* query data source list
*
* @param loginUser login user
* @param type data source type
* @return data source list
*/
public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
Map<String, Object> result = new HashMap<>();
List<DataSource> datasourceList;
if (isAdmin(loginUser)) {
datasourceList = dataSourceMapper.listAllDataSourceByType(type);
} else {
datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
}
result.put(Constants.DATA_LIST, datasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* verify datasource exists
*
* @param name datasource name
* @return check result code
*/
public Result<Object> verifyDataSourceName(String name) {
Result<Object> result = new Result<>();
List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
if (dataSourceList != null && dataSourceList.size() > 0) {
logger.error("datasource name:{} has exist, can't create again.", name);
putMsg(result, Status.DATASOURCE_EXIST);
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* check connection
*
* @param type data source type
* @param parameter data source parameters
* @return connection test result code
*/
public Result<Object> checkConnection(DbType type, String parameter) {
Result<Object> result = new Result<>();
BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter);
if (datasource == null) {
putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type);
return result;
}
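// try-with-resources: the probe connection is closed automatically after the test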
try (Connection connection = datasource.getConnection()) {
if (connection == null) {
putMsg(result, Status.CONNECTION_TEST_FAILURE);
return result;
}
putMsg(result, Status.SUCCESS);
return result;
} catch (Exception e) {
logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage());
return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), e.getMessage());
}
}
/**
* test connection
*
* @param id datasource id
* @return connect result code
*/
public Result<Object> connectionTest(int id) {
DataSource dataSource = dataSourceMapper.selectById(id);
if (dataSource == null) {
Result<Object> result = new Result<>();
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
}
/**
* build parameters
*
* @param type data source type
* @param host data source host
* @param port data source port
* @param database data source database name
* @param principal principal
* @param userName user name
* @param password password
* @param connectType connect type (ORACLE only)
* @param other other parameters
* @param javaSecurityKrb5Conf java.security.krb5.conf path
* @param loginUserKeytabUsername login user keytab username
* @param loginUserKeytabPath login user keytab path
* @return datasource parameter
*/
public String buildParameter(DbType type, String host,
String port, String database, String principal, String userName,
String password, DbConnectType connectType, String other,
String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) {
String address = buildAddress(type, host, port, connectType);
Map<String, Object> parameterMap = new LinkedHashMap<String, Object>(6);
String jdbcUrl;
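// e.g. SQLSERVER yields "jdbc:sqlserver://host:port;databaseName=db", every other type "<address>/<database>"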
if (DbType.SQLSERVER == type) {
jdbcUrl = address + ";databaseName=" + database;
} else {
jdbcUrl = address + "/" + database;
}
if (Constants.ORACLE.equals(type.name())) {
parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
}
if (CommonUtils.getKerberosStartupState()
&& (type == DbType.HIVE || type == DbType.SPARK)) {
jdbcUrl += ";principal=" + principal;
}
String separator = "";
if (Constants.MYSQL.equals(type.name())
|| Constants.POSTGRESQL.equals(type.name())
|| Constants.CLICKHOUSE.equals(type.name())
|| Constants.ORACLE.equals(type.name())
|| Constants.PRESTO.equals(type.name())) {
separator = "&";
} else if (Constants.HIVE.equals(type.name())
|| Constants.SPARK.equals(type.name())
|| Constants.DB2.equals(type.name())
|| Constants.SQLSERVER.equals(type.name())) {
separator = ";";
}
parameterMap.put(TYPE, connectType);
parameterMap.put(Constants.ADDRESS, address);
parameterMap.put(Constants.DATABASE, database);
parameterMap.put(Constants.JDBC_URL, jdbcUrl);
parameterMap.put(Constants.USER, userName);
parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
if (CommonUtils.getKerberosStartupState()
&& (type == DbType.HIVE || type == DbType.SPARK)) {
parameterMap.put(Constants.PRINCIPAL, principal);
parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf);
parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername);
parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath);
}
Map<String, String> map = JSONUtils.toMap(other);
if (map != null) {
StringBuilder otherSb = new StringBuilder();
for (Map.Entry<String, String> entry: map.entrySet()) {
otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
}
if (!Constants.DB2.equals(type.name())) {
otherSb.deleteCharAt(otherSb.length() - 1);
}
parameterMap.put(Constants.OTHER, otherSb);
}
if (logger.isDebugEnabled()) {
logger.info("parameters map:{}", JSONUtils.toJsonString(parameterMap));
}
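// illustration: for ORACLE with ORACLE_SERVICE_NAME this returns JSON like
// {"connectType":"ORACLE_SERVICE_NAME","type":"ORACLE_SERVICE_NAME","address":"jdbc:oracle:thin:@//192.168.9.1:1521",
// "database":"im","jdbcUrl":"jdbc:oracle:thin:@//192.168.9.1:1521/im","user":"test","password":"..."}
// (see the expected strings in DataSourceServiceTest#buildParameter)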
return JSONUtils.toJsonString(parameterMap);
}
private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
StringBuilder sb = new StringBuilder();
if (Constants.MYSQL.equals(type.name())) {
sb.append(Constants.JDBC_MYSQL);
sb.append(host).append(":").append(port);
} else if (Constants.POSTGRESQL.equals(type.name())) {
sb.append(Constants.JDBC_POSTGRESQL);
sb.append(host).append(":").append(port);
} else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
sb.append(Constants.JDBC_HIVE_2);
String[] hostArray = host.split(",");
if (hostArray.length > 0) {
for (String zkHost : hostArray) {
sb.append(String.format("%s:%s,", zkHost, port));
}
sb.deleteCharAt(sb.length() - 1);
}
} else if (Constants.CLICKHOUSE.equals(type.name())) {
sb.append(Constants.JDBC_CLICKHOUSE);
sb.append(host).append(":").append(port);
} else if (Constants.ORACLE.equals(type.name())) {
if (connectType == DbConnectType.ORACLE_SID) {
sb.append(Constants.JDBC_ORACLE_SID);
} else {
sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
}
sb.append(host).append(":").append(port);
} else if (Constants.SQLSERVER.equals(type.name())) {
sb.append(Constants.JDBC_SQLSERVER);
sb.append(host).append(":").append(port);
} else if (Constants.DB2.equals(type.name())) {
sb.append(Constants.JDBC_DB2);
sb.append(host).append(":").append(port);
} else if (Constants.PRESTO.equals(type.name())) {
sb.append(Constants.JDBC_PRESTO);
sb.append(host).append(":").append(port);
}
return sb.toString();
}
/**
* delete datasource
*
* @param loginUser login user
* @param datasourceId data source id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Result<Object> delete(User loginUser, int datasourceId) {
Result<Object> result = new Result<>();
try {
//query datasource by id
DataSource dataSource = dataSourceMapper.selectById(datasourceId);
if (dataSource == null) {
logger.error("resource id {} not exist", datasourceId);
putMsg(result, Status.RESOURCE_NOT_EXIST);
return result;
}
if (!hasPerm(loginUser, dataSource.getUserId())) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
dataSourceMapper.deleteById(datasourceId);
datasourceUserMapper.deleteByDatasourceId(datasourceId);
putMsg(result, Status.SUCCESS);
} catch (Exception e) {
logger.error("delete datasource error", e);
throw new RuntimeException("delete datasource error");
}
return result;
}
/**
* unauthorized datasource
*
* @param loginUser login user
* @param userId user id
* @return unauthed data source result code
*/
public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
//only admin operate
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
/**
* query all data sources except userId
*/
List<DataSource> resultList = new ArrayList<>();
List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId);
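// unauthorized = datasources not owned by the user minus those already granted to the user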
Set<DataSource> datasourceSet = null;
if (datasourceList != null && datasourceList.size() > 0) {
datasourceSet = new HashSet<>(datasourceList);
List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);
Set<DataSource> authedDataSourceSet = null;
if (authedDataSourceList != null && authedDataSourceList.size() > 0) {
authedDataSourceSet = new HashSet<>(authedDataSourceList);
datasourceSet.removeAll(authedDataSourceSet);
}
resultList = new ArrayList<>(datasourceSet);
}
result.put(Constants.DATA_LIST, resultList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* authorized datasource
*
* @param loginUser login user
* @param userId user id
* @return authorized result code
*/
public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
Map<String, Object> result = new HashMap<>();
if (!isAdmin(loginUser)) {
putMsg(result, Status.USER_NO_OPERATION_PERM);
return result;
}
List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
result.put(Constants.DATA_LIST, authedDatasourceList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* get host and port by address
*
* @param address address
* @return string array: [host,port]
*/
private String[] getHostsAndPort(String address) {
return getHostsAndPort(address, Constants.DOUBLE_SLASH);
}
/**
* get host and port by address
*
* @param address address
* @param separator separator
* @return string array: [host,port]
*/
private String[] getHostsAndPort(String address, String separator) {
String[] result = new String[2];
String[] tmpArray = address.split(separator);
String hostsAndPorts = tmpArray[tmpArray.length - 1];
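// e.g. address "jdbc:hive2://h1:10000,h2:10000" leaves hostsAndPorts "h1:10000,h2:10000";
// the loop below collects hosts "h1,h2" and keeps the port of the first entry, "10000"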
StringBuilder hosts = new StringBuilder();
String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);
String port = hostPortArray[0].split(Constants.COLON)[1];
for (String hostPort : hostPortArray) {
hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
}
hosts.deleteCharAt(hosts.length() - 1);
result[0] = hosts.toString();
result[1] = port;
return result;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services so the service module is clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service will help contributors and users read the code and locate the function.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interface
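A minimal sketch of the proposed split (illustrative only: the interface and impl names below are assumptions patterned on the existing ProjectService/ProjectServiceImpl pair, not the final API):
```java
// each api service becomes an interface; the current logic moves into an *Impl class
public interface ExecutorService {

    Map<String, Object> execProcessInstance(User loginUser, String projectName,
                                            int processDefinitionId, String cronTime, CommandType commandType,
                                            FailureStrategy failureStrategy, String startNodeList,
                                            TaskDependType taskDependType, WarningType warningType, int warningGroupId,
                                            RunMode runMode, Priority processInstancePriority, String workerGroup,
                                            Integer timeout, Map<String, String> startParams) throws ParseException;

    Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType);

    Map<String, Object> startCheckByProcessDefinedId(int processDefineId);
}

@Service
public class ExecutorServiceImpl extends BaseService implements ExecutorService {
    // the existing method bodies move here unchanged; callers depend on the interface
}
```
Callers would then be autowired against the interface, so alternative implementations can be mocked or swapped without touching the controllers.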
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/ExecutorService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS;
import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT;
import org.apache.dolphinscheduler.api.enums.ExecuteType;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* executor service
*/
@Service
public class ExecutorService extends BaseService {
private static final Logger logger = LoggerFactory.getLogger(ExecutorService.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private MonitorService monitorService;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessService processService;
/**
* execute process instance
*
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process Definition Id
* @param cronTime cron time
* @param commandType command type
* @param failureStrategy failure strategy
* @param startNodeList start node list
* @param taskDependType node dependency type
* @param warningType warning type
* @param warningGroupId notify group id
* @param processInstancePriority process instance priority
* @param workerGroup worker group name
* @param runMode run mode
* @param timeout timeout
* @param startParams the global param values which pass to new process instance
* @return execute process instance code
* @throws ParseException Parse Exception
*/
public Map<String, Object> execProcessInstance(User loginUser, String projectName,
int processDefinitionId, String cronTime, CommandType commandType,
FailureStrategy failureStrategy, String startNodeList,
TaskDependType taskDependType, WarningType warningType, int warningGroupId,
RunMode runMode,
Priority processInstancePriority, String workerGroup, Integer timeout,
Map<String, String> startParams) throws ParseException {
Map<String, Object> result = new HashMap<>();
// timeout is invalid
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) {
putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR);
return result;
}
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project);
if (checkResultAndAuth != null) {
return checkResultAndAuth;
}
// check process define release state
ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId);
result = checkProcessDefinitionValid(processDefinition, processDefinitionId);
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
return result;
}
// check master exists
if (!checkMasterExists(result)) {
return result;
}
/**
* create command
*/
int create = this.createCommand(commandType, processDefinitionId,
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(),
warningGroupId, runMode, processInstancePriority, workerGroup, startParams);
if (create > 0) {
processDefinition.setWarningGroupId(warningGroupId);
processDefinitionMapper.updateById(processDefinition);
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.START_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* check whether master exists
*
* @param result result
* @return true if master exists, otherwise false
*/
private boolean checkMasterExists(Map<String, Object> result) {
// check master server exists
List<Server> masterServers = monitorService.getServerListFromZK(true);
// no master
if (masterServers.size() == 0) {
putMsg(result, Status.MASTER_NOT_EXISTS);
return false;
}
return true;
}
/**
* check whether the process definition can be executed
*
* @param processDefinition process definition
* @param processDefineId process definition id
* @return check result code
*/
public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) {
Map<String, Object> result = new HashMap<>();
if (processDefinition == null) {
// check process definition exists
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
} else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
// check process definition online
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId);
} else {
result.put(Constants.STATUS, Status.SUCCESS);
}
return result;
}
/**
* do action to process instance: pause, stop, repeat, recover from pause, recover from stop
*
* @param loginUser login user
* @param projectName project name
* @param processInstanceId process instance id
* @param executeType execute type
* @return execute result code
*/
public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project);
if (checkResult != null) {
return checkResult;
}
// check master exists
if (!checkMasterExists(result)) {
return result;
}
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId);
if (processInstance == null) {
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId);
return result;
}
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId());
if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) {
result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId());
if (result.get(Constants.STATUS) != Status.SUCCESS) {
return result;
}
}
checkResult = checkExecuteType(processInstance, executeType);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
if (!checkTenantSuitable(processDefinition)) {
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ",
processDefinition.getId(), processDefinition.getName());
putMsg(result, Status.TENANT_NOT_SUITABLE);
}
switch (executeType) {
case REPEAT_RUNNING:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING);
break;
case RECOVER_SUSPENDED_PROCESS:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS);
break;
case START_FAILURE_TASK_PROCESS:
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS);
break;
case STOP:
if (processInstance.getState() == ExecutionStatus.READY_STOP) {
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState());
} else {
result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP);
}
break;
case PAUSE:
if (processInstance.getState() == ExecutionStatus.READY_PAUSE) {
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState());
} else {
result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE);
}
break;
default:
logger.error("unknown execute type : {}", executeType);
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type");
break;
}
return result;
}
/**
* check tenant suitable
*
* @param processDefinition process definition
* @return true if tenant suitable, otherwise return false
*/
private boolean checkTenantSuitable(ProcessDefinition processDefinition) {
// checkTenantExists();
Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(),
processDefinition.getUserId());
return tenant != null;
}
/**
* check whether the state of the process instance matches the type of operation
*
* @param processInstance process instance
* @param executeType execute type
* @return check result code
*/
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) {
Map<String, Object> result = new HashMap<>();
ExecutionStatus executionStatus = processInstance.getState();
boolean checkResult = false;
switch (executeType) {
case PAUSE:
case STOP:
if (executionStatus.typeIsRunning()) {
checkResult = true;
}
break;
case REPEAT_RUNNING:
if (executionStatus.typeIsFinished()) {
checkResult = true;
}
break;
case START_FAILURE_TASK_PROCESS:
if (executionStatus.typeIsFailure()) {
checkResult = true;
}
break;
case RECOVER_SUSPENDED_PROCESS:
if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) {
checkResult = true;
}
break;
default:
break;
}
if (!checkResult) {
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString());
} else {
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* prepare to update process instance command type and status
*
* @param processInstance process instance
* @param commandType command type
* @param executionStatus execute status
* @return update result
*/
private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) {
Map<String, Object> result = new HashMap<>();
processInstance.setCommandType(commandType);
processInstance.addHistoryCmd(commandType);
processInstance.setState(executionStatus);
int update = processService.updateProcessInstance(processInstance);
// determine whether the process is normal
if (update > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* insert command; used by the page operations: repeat running, recover suspended process and start failure task process
*
* @param loginUser login user
* @param instanceId instance id
* @param processDefinitionId process definition id
* @param commandType command type
* @return insert result code
*/
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) {
Map<String, Object> result = new HashMap<>();
Command command = new Command();
command.setCommandType(commandType);
command.setProcessDefinitionId(processDefinitionId);
command.setCommandParam(String.format("{\"%s\":%d}",
CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId));
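// produces e.g. {"ProcessInstanceId":123} (assuming CMD_PARAM_RECOVER_PROCESS_ID_STRING
// resolves to "ProcessInstanceId"; the actual key lives in Constants, not shown here)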
command.setExecutorId(loginUser.getId());
if (!processService.verifyIsNeedCreateCommand(command)) {
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId);
return result;
}
int create = processService.createCommand(command);
if (create > 0) {
putMsg(result, Status.SUCCESS);
} else {
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR);
}
return result;
}
/**
* check if sub processes are offline before starting process definition
*
* @param processDefineId process definition id
* @return check result code
*/
public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) {
Map<String, Object> result = new HashMap<>();
if (processDefineId == 0) {
logger.error("process definition id is null");
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id");
}
List<Integer> ids = new ArrayList<>();
processService.recurseFindSubProcessId(processDefineId, ids);
Integer[] idArray = ids.toArray(new Integer[ids.size()]);
if (!ids.isEmpty()) {
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray);
if (processDefinitionList != null) {
for (ProcessDefinition processDefinition : processDefinitionList) {
/**
* if any subprocess is not online, return directly
*/
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
logger.info("not release process definition id: {} , name : {}",
processDefinition.getId(), processDefinition.getName());
return result;
}
}
}
}
putMsg(result, Status.SUCCESS);
return result;
}
/**
* create command
*
* @param commandType commandType
* @param processDefineId processDefineId
* @param nodeDep nodeDep
* @param failureStrategy failureStrategy
* @param startNodeList startNodeList
* @param schedule schedule
* @param warningType warningType
* @param executorId executorId
* @param warningGroupId warningGroupId
* @param runMode runMode
* @param processInstancePriority processInstancePriority
* @param workerGroup workerGroup
* @return command id
*/
private int createCommand(CommandType commandType, int processDefineId,
TaskDependType nodeDep, FailureStrategy failureStrategy,
String startNodeList, String schedule, WarningType warningType,
int executorId, int warningGroupId,
RunMode runMode, Priority processInstancePriority, String workerGroup,
Map<String, String> startParams) throws ParseException {
/**
* instantiate command
*/
Command command = new Command();
Map<String, String> cmdParam = new HashMap<>();
if (commandType == null) {
command.setCommandType(CommandType.START_PROCESS);
} else {
command.setCommandType(commandType);
}
command.setProcessDefinitionId(processDefineId);
if (nodeDep != null) {
command.setTaskDependType(nodeDep);
}
if (failureStrategy != null) {
command.setFailureStrategy(failureStrategy);
}
if (StringUtils.isNotEmpty(startNodeList)) {
cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList);
}
if (warningType != null) {
command.setWarningType(warningType);
}
if (startParams != null && startParams.size() > 0) {
cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams));
}
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
command.setExecutorId(executorId);
command.setWarningGroupId(warningGroupId);
command.setProcessInstancePriority(processInstancePriority);
command.setWorkerGroup(workerGroup);
Date start = null;
Date end = null;
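// the schedule argument is a cron range "start,end", e.g. "2020-01-01 00:00:00,2020-01-31 23:00:00"
// (the same format the tests below pass as cronTime)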
if (StringUtils.isNotEmpty(schedule)) {
String[] interval = schedule.split(",");
if (interval.length == 2) {
start = DateUtils.getScheduleDate(interval[0]);
end = DateUtils.getScheduleDate(interval[1]);
}
}
// determine whether to complement
if (commandType == CommandType.COMPLEMENT_DATA) {
runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode;
if (null != start && null != end && !start.after(end)) {
if (runMode == RunMode.RUN_MODE_SERIAL) {
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
return processService.createCommand(command);
} else if (runMode == RunMode.RUN_MODE_PARALLEL) {
List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId);
List<Date> listDate = new LinkedList<>();
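// parallel mode: one complement command per scheduler fire date inside [start, end]
// when a release schedule exists, otherwise one command per day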
if (!CollectionUtils.isEmpty(schedules)) {
for (Schedule item : schedules) {
listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab()));
}
}
if (!CollectionUtils.isEmpty(listDate)) {
// loop by schedule date
for (Date date : listDate) {
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
processService.createCommand(command);
}
return listDate.size();
} else {
// loop by day
int runCount = 0;
while (!start.after(end)) {
runCount += 1;
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start));
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
processService.createCommand(command);
start = DateUtils.getSomeDay(start, 1);
}
return runCount;
}
}
} else {
logger.error("there is not valid schedule date for the process definition: id:{},date:{}",
processDefineId, schedule);
}
} else {
command.setCommandParam(JSONUtils.toJsonString(cmdParam));
return processService.createCommand(command);
}
return 0;
}
/**
* check result and auth
*/
private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) {
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
Status status = (Status) checkResult.get(Constants.STATUS);
if (status != Status.SUCCESS) {
return checkResult;
}
return null;
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services so the service module is clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service will help contributors and users read the code and locate the function.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interface
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services so the service module is clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service will help contributors and users read the code and locate the function.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interface
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ExecutorServiceImpl.java | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interface about api module service for clearly service module.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service will help contributor or user to ready the code and locate the function.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interface
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataSourceServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.MySQLDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({"sun.security.*", "javax.net.*"})
@PrepareForTest({DataSourceFactory.class, CommonUtils.class})
public class DataSourceServiceTest {
@InjectMocks
private DataSourceService dataSourceService;
@Mock
private DataSourceMapper dataSourceMapper;
@Mock
private DataSourceUserMapper datasourceUserMapper;
public void createDataSourceTest() {
User loginUser = getAdminUser();
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
DbType dataSourceType = DbType.POSTGRESQL;
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null, null, null, null);
// data source exits
List<DataSource> dataSourceList = new ArrayList<>();
DataSource dataSource = new DataSource();
dataSource.setName(dataSourceName);
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(dataSourceList);
Result dataSourceExitsResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceExitsResult.getCode().intValue());
// data source exits
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
Result connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(),Status.DATASOURCE_CONNECT_FAILED.getMsg());
//PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
PowerMockito.doReturn(connectionResult).when(dataSourceService).checkConnection(dataSourceType, parameter);
Result connectFailedResult = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailedResult.getCode().intValue());
// data source exits
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
connectionResult = new Result(Status.SUCCESS.getCode(),Status.SUCCESS.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(null);
Result notValidError = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getCode(), notValidError.getCode().intValue());
// success
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName.trim())).thenReturn(null);
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
PowerMockito.when(DataSourceFactory.getDatasource(dataSourceType, parameter)).thenReturn(JSONUtils.parseObject(parameter, MySQLDataSource.class));
Result success = dataSourceService.createDataSource(loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue());
}
public void updateDataSourceTest() {
User loginUser = getAdminUser();
int dataSourceId = 12;
String dataSourceName = "dataSource01";
String dataSourceDesc = "test dataSource";
DbType dataSourceType = DbType.POSTGRESQL;
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null, null, null, null);
// data source not exits
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Result resourceNotExits = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(), resourceNotExits.getCode().intValue());
// user no operation perm
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Result userNoOperationPerm = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM.getCode(), userNoOperationPerm.getCode().intValue());
// data source name exits
dataSource.setUserId(-1);
List<DataSource> dataSourceList = new ArrayList<>();
dataSourceList.add(dataSource);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(dataSourceList);
Result dataSourceNameExist = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_EXIST.getCode(), dataSourceNameExist.getCode().intValue());
// data source connect failed
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
Result connectionResult = new Result(Status.SUCCESS.getCode(),Status.SUCCESS.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
Result connectFailed = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_CONNECT_FAILED.getCode(), connectFailed.getCode().intValue());
//success
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(null);
connectionResult = new Result(Status.DATASOURCE_CONNECT_FAILED.getCode(),Status.DATASOURCE_CONNECT_FAILED.getMsg());
PowerMockito.when(dataSourceService.checkConnection(dataSourceType, parameter)).thenReturn(connectionResult);
Result success = dataSourceService.updateDataSource(dataSourceId, loginUser, dataSourceName, dataSourceDesc, dataSourceType, parameter);
Assert.assertEquals(Status.SUCCESS.getCode(), success.getCode().intValue());
}
@Test
public void queryDataSourceListPagingTest() {
User loginUser = getAdminUser();
String searchVal = "";
int pageNo = 1;
int pageSize = 10;
Map<String, Object> success = dataSourceService.queryDataSourceListPaging(loginUser, searchVal, pageNo, pageSize);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void connectionTest() {
int dataSourceId = -1;
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Result result = dataSourceService.connectionTest(dataSourceId);
Assert.assertEquals(Status.RESOURCE_NOT_EXIST.getCode(),result.getCode().intValue());
}
@Test
public void deleteTest() {
User loginUser = getAdminUser();
int dataSourceId = 1;
Result result = new Result();
//resource not exist
dataSourceService.putMsg(result, Status.RESOURCE_NOT_EXIST);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(null);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
// user no operation perm
dataSourceService.putMsg(result, Status.USER_NO_OPERATION_PERM);
DataSource dataSource = new DataSource();
dataSource.setUserId(0);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
// success
dataSourceService.putMsg(result, Status.SUCCESS);
dataSource.setUserId(-1);
PowerMockito.when(dataSourceMapper.selectById(dataSourceId)).thenReturn(dataSource);
Assert.assertEquals(result.getCode(), dataSourceService.delete(loginUser, dataSourceId).getCode());
}
@Test
public void unauthDatasourceTest() {
User loginUser = getAdminUser();
int userId = -1;
//user no operation perm
Map<String, Object> noOperationPerm = dataSourceService.unauthDatasource(loginUser, userId);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> success = dataSourceService.unauthDatasource(loginUser, userId);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void authedDatasourceTest() {
User loginUser = getAdminUser();
int userId = -1;
//user no operation perm
Map<String, Object> noOperationPerm = dataSourceService.authedDatasource(loginUser, userId);
Assert.assertEquals(Status.USER_NO_OPERATION_PERM, noOperationPerm.get(Constants.STATUS));
//success
loginUser.setUserType(UserType.ADMIN_USER);
Map<String, Object> success = dataSourceService.authedDatasource(loginUser, userId);
Assert.assertEquals(Status.SUCCESS, success.get(Constants.STATUS));
}
@Test
public void queryDataSourceListTest() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
Map<String, Object> map = dataSourceService.queryDataSourceList(loginUser, DbType.MYSQL.ordinal());
Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS));
}
@Test
public void verifyDataSourceNameTest() {
User loginUser = new User();
loginUser.setUserType(UserType.GENERAL_USER);
String dataSourceName = "dataSource1";
PowerMockito.when(dataSourceMapper.queryDataSourceByName(dataSourceName)).thenReturn(getDataSourceList());
Result result = dataSourceService.verifyDataSourceName(dataSourceName);
Assert.assertEquals(Status.DATASOURCE_EXIST.getMsg(), result.getMsg());
}
@Test
public void queryDataSourceTest() {
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(null);
Map<String, Object> result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.RESOURCE_NOT_EXIST.getCode());
PowerMockito.when(dataSourceMapper.selectById(Mockito.anyInt())).thenReturn(getOracleDataSource());
result = dataSourceService.queryDataSource(Mockito.anyInt());
Assert.assertEquals(((Status) result.get(Constants.STATUS)).getCode(), Status.SUCCESS.getCode());
}
private List<DataSource> getDataSourceList() {
List<DataSource> dataSources = new ArrayList<>();
dataSources.add(getOracleDataSource());
return dataSources;
}
private DataSource getOracleDataSource() {
DataSource dataSource = new DataSource();
dataSource.setName("test");
dataSource.setNote("Note");
dataSource.setType(DbType.ORACLE);
dataSource.setConnectionParams("{\"connectType\":\"ORACLE_SID\",\"address\":\"jdbc:oracle:thin:@192.168.xx.xx:49161\",\"database\":\"XE\","
+ "\"jdbcUrl\":\"jdbc:oracle:thin:@192.168.xx.xx:49161/XE\",\"user\":\"system\",\"password\":\"oracle\"}");
return dataSource;
}
@Test
public void buildParameter() {
String param = dataSourceService.buildParameter(DbType.ORACLE, "192.168.9.1", "1521", "im"
, "", "test", "test", DbConnectType.ORACLE_SERVICE_NAME, "", "", "","");
String expected = "{\"connectType\":\"ORACLE_SERVICE_NAME\",\"type\":\"ORACLE_SERVICE_NAME\",\"address\":\"jdbc:oracle:thin:@//192.168.9.1:1521\",\"database\":\"im\","
+ "\"jdbcUrl\":\"jdbc:oracle:thin:@//192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"test\"}";
Assert.assertEquals(expected, param);
PowerMockito.mockStatic(CommonUtils.class);
PowerMockito.when(CommonUtils.getKerberosStartupState()).thenReturn(true);
PowerMockito.when(CommonUtils.encodePassword(Mockito.anyString())).thenReturn("test");
param = dataSourceService.buildParameter(DbType.HIVE, "192.168.9.1", "10000", "im"
, "hive/hdfs-mycluster@ESZ.COM", "test", "test", null, "", "/opt/krb5.conf", "test2/hdfs-mycluster@ESZ.COM", "/opt/hdfs.headless.keytab");
expected = "{\"type\":null,\"address\":\"jdbc:hive2://192.168.9.1:10000\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:hive2://192.168.9.1:10000/im;principal=hive/hdfs-mycluster@ESZ.COM\","
+ "\"user\":\"test\",\"password\":\"test\",\"principal\":\"hive/hdfs-mycluster@ESZ.COM\",\"javaSecurityKrb5Conf\":\"/opt/krb5.conf\","
+ "\"loginUserKeytabUsername\":\"test2/hdfs-mycluster@ESZ.COM\",\"loginUserKeytabPath\":\"/opt/hdfs.headless.keytab\"}";
Assert.assertEquals(expected, param);
}
@Test
public void buildParameterWithDecodePassword() {
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "true");
String param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im"
, "", "test", "123456", null, "", "", "", "");
String expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\","
+ "\"user\":\"test\",\"password\":\"IUAjJCVeJipNVEl6TkRVMg==\"}";
Assert.assertEquals(expected, param);
PropertyUtils.setValue(Constants.DATASOURCE_ENCRYPTION_ENABLE, "false");
param = dataSourceService.buildParameter(DbType.MYSQL, "192.168.9.1", "1521", "im"
, "", "test", "123456", null, "", "", "", "");
expected = "{\"type\":null,\"address\":\"jdbc:mysql://192.168.9.1:1521\",\"database\":\"im\",\"jdbcUrl\":\"jdbc:mysql://192.168.9.1:1521/im\",\"user\":\"test\",\"password\":\"123456\"}";
Assert.assertEquals(expected, param);
}
/**
* get Mock Admin User
*
* @return admin user
*/
private User getAdminUser() {
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserName("admin");
loginUser.setUserType(UserType.GENERAL_USER);
return loginUser;
}
/**
* test check connection
* @throws Exception
*/
@Test
public void testCheckConnection() throws Exception {
DbType dataSourceType = DbType.POSTGRESQL;
String parameter = dataSourceService.buildParameter(dataSourceType, "172.16.133.200", "5432", "dolphinscheduler", null, "postgres", "", null, null, null, null, null);
PowerMockito.mockStatic(DataSourceFactory.class);
PowerMockito.when(DataSourceFactory.getDatasource(Mockito.any(), Mockito.anyString())).thenReturn(null);
Result result = dataSourceService.checkConnection(dataSourceType, parameter);
Assert.assertEquals(Status.DATASOURCE_TYPE_NOT_EXIST.getCode(), result.getCode().intValue());
BaseDataSource dataSource = PowerMockito.mock(BaseDataSource.class);
PowerMockito.when(DataSourceFactory.getDatasource(Mockito.any(), Mockito.anyString())).thenReturn(dataSource);
PowerMockito.when(dataSource.getConnection()).thenReturn(null);
result = dataSourceService.checkConnection(dataSourceType, parameter);
Assert.assertEquals(Status.CONNECTION_TEST_FAILURE.getCode(), result.getCode().intValue());
Connection connection = PowerMockito.mock(Connection.class);
PowerMockito.when(dataSource.getConnection()).thenReturn(connection);
result = dataSourceService.checkConnection(dataSourceType, parameter);
Assert.assertEquals(Status.SUCCESS.getCode(), result.getCode().intValue());
}
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services so the service module is clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service will help contributors and users read the code and locate the function.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce service interface
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorService2Test.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.ProjectServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.RunMode;
import org.apache.dolphinscheduler.common.model.Server;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
/**
* test for ExecutorService
*/
@RunWith(MockitoJUnitRunner.Silent.class)
public class ExecutorService2Test {
@InjectMocks
private ExecutorService executorService;
@Mock
private ProcessService processService;
@Mock
private ProcessDefinitionMapper processDefinitionMapper;
@Mock
private ProjectMapper projectMapper;
@Mock
private ProjectServiceImpl projectService;
@Mock
private MonitorService monitorService;
private int processDefinitionId = 1;
private int processInstanceId = 1;
private int tenantId = 1;
private int userId = 1;
private ProcessDefinition processDefinition = new ProcessDefinition();
private ProcessInstance processInstance = new ProcessInstance();
private User loginUser = new User();
private String projectName = "projectName";
private Project project = new Project();
private String cronTime;
@Before
public void init() {
// user
loginUser.setId(userId);
// processDefinition
processDefinition.setId(processDefinitionId);
processDefinition.setReleaseState(ReleaseState.ONLINE);
processDefinition.setTenantId(tenantId);
processDefinition.setUserId(userId);
// processInstance
processInstance.setId(processInstanceId);
processInstance.setProcessDefinitionId(processDefinitionId);
processInstance.setState(ExecutionStatus.FAILURE);
processInstance.setExecutorId(userId);
processInstance.setTenantId(tenantId);
// project
project.setName(projectName);
// cron time: a "start,end" range
cronTime = "2020-01-01 00:00:00,2020-01-31 23:00:00";
// mock
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(checkProjectAndAuth());
Mockito.when(processDefinitionMapper.selectById(processDefinitionId)).thenReturn(processDefinition);
Mockito.when(processService.getTenantForProcess(tenantId, userId)).thenReturn(new Tenant());
Mockito.when(processService.createCommand(any(Command.class))).thenReturn(1);
Mockito.when(monitorService.getServerListFromZK(true)).thenReturn(getMasterServersList());
Mockito.when(processService.findProcessInstanceDetailById(processInstanceId)).thenReturn(processInstance);
Mockito.when(processService.findProcessDefineById(processDefinitionId)).thenReturn(processDefinition);
}
/**
* not complement
*/
@Test
public void testNoComplement() throws ParseException {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.START_PROCESS,
null, null,
null, null, 0,
RunMode.RUN_MODE_SERIAL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}
    /**
     * not complement, with start node list
     */
@Test
public void testComplementWithStartNodeList() throws ParseException {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.START_PROCESS,
null, "n1,n2",
null, null, 0,
RunMode.RUN_MODE_SERIAL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}
/**
* date error
*/
@Test
public void testDateError() throws ParseException {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, "2020-01-31 23:00:00,2020-01-01 00:00:00", CommandType.COMPLEMENT_DATA,
null, null,
null, null, 0,
RunMode.RUN_MODE_SERIAL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
Assert.assertEquals(Status.START_PROCESS_INSTANCE_ERROR, result.get(Constants.STATUS));
verify(processService, times(0)).createCommand(any(Command.class));
}
/**
* serial
*/
@Test
public void testSerial() throws ParseException {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
null, null, 0,
RunMode.RUN_MODE_SERIAL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(1)).createCommand(any(Command.class));
}
/**
* without schedule
*/
@Test
public void testParallelWithOutSchedule() throws ParseException {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(zeroSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
null, null, 0,
RunMode.RUN_MODE_PARALLEL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(31)).createCommand(any(Command.class));
}
/**
* with schedule
*/
@Test
public void testParallelWithSchedule() throws ParseException {
Mockito.when(processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId)).thenReturn(oneSchedulerList());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
null, null, 0,
RunMode.RUN_MODE_PARALLEL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
verify(processService, times(15)).createCommand(any(Command.class));
}
@Test
    public void testNoMasterServers() throws ParseException {
Mockito.when(monitorService.getServerListFromZK(true)).thenReturn(new ArrayList<>());
Map<String, Object> result = executorService.execProcessInstance(loginUser, projectName,
processDefinitionId, cronTime, CommandType.COMPLEMENT_DATA,
null, null,
null, null, 0,
RunMode.RUN_MODE_PARALLEL,
Priority.LOW, Constants.DEFAULT_WORKER_GROUP, 110, null);
        Assert.assertEquals(Status.MASTER_NOT_EXISTS, result.get(Constants.STATUS));
}
private List<Server> getMasterServersList() {
List<Server> masterServerList = new ArrayList<>();
Server masterServer1 = new Server();
masterServer1.setId(1);
masterServer1.setHost("192.168.220.188");
masterServer1.setPort(1121);
masterServerList.add(masterServer1);
Server masterServer2 = new Server();
masterServer2.setId(2);
masterServer2.setHost("192.168.220.189");
masterServer2.setPort(1122);
masterServerList.add(masterServer2);
return masterServerList;
}
    private List<Schedule> zeroSchedulerList() {
        return Collections.emptyList();
    }
private List<Schedule> oneSchedulerList() {
List<Schedule> schedulerList = new LinkedList<>();
Schedule schedule = new Schedule();
schedule.setCrontab("0 0 0 1/2 * ?");
schedulerList.add(schedule);
return schedulerList;
}
private Map<String, Object> checkProjectAndAuth() {
Map<String, Object> result = new HashMap<>();
result.put(Constants.STATUS, Status.SUCCESS);
return result;
}
} |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services to make the service module clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service layer will help contributors and users read the code and locate functionality.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce a service interface; a minimal sketch follows the test file below.
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ExecutorServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.ApiApplicationServer;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.Map;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = ApiApplicationServer.class)
public class ExecutorServiceTest {
private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceTest.class);
@Autowired
private ExecutorService executorService;
@Ignore
@Test
    public void startCheckByProcessDefinedId() {
Map<String, Object> map = executorService.startCheckByProcessDefinedId(1234);
Assert.assertNull(map);
}
@Test
public void putMsgWithParamsTest() {
        Map<String, Object> map = new HashMap<>();
putMsgWithParams(map, Status.PROJECT_ALREADY_EXISTS);
logger.info(map.toString());
}
    void putMsgWithParams(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        if (statusParams != null && statusParams.length > 0) {
            result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.put(Constants.MSG, status.getMsg());
        }
    }
} |
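A minimal sketch of the interface extraction this issue proposes, assuming hypothetical names (an ExecutorService interface plus an ExecutorServiceImpl Spring bean, mirroring the ProjectServiceImpl pattern already imported above); the actual refactor in the linked PR may differ:

import java.util.Collections;
import java.util.Map;
import org.springframework.stereotype.Service;

public interface ExecutorService {
    Map<String, Object> startCheckByProcessDefinedId(int processDefineId);
}

@Service
class ExecutorServiceImpl implements ExecutorService {
    @Override
    public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) {
        // the existing concrete logic would move here unchanged
        return Collections.emptyMap();
    }
}

Callers would then depend on the interface (for example via @Autowired private ExecutorService executorService), keeping the api module's service contracts separate from their implementations.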
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 3,369 | [Improvement][api] Introduce interface about api module service | **Describe the question**
Introduce interfaces for the api module services to make the service module clearer.
**What are the current deficiencies and the benefits of improvement**
- A clear and concise service layer will help contributors and users read the code and locate functionality.
**Which version of DolphinScheduler:**
-[dev]
**Describe alternatives you've considered**
Introduce a service interface, as sketched above.
| https://github.com/apache/dolphinscheduler/issues/3369 | https://github.com/apache/dolphinscheduler/pull/4759 | 9ae29a756f0aeed894c80f5e495d786ccf03f41f | 15a5b0588399bbafd201405af02d611d548fac12 | "2020-07-31T13:52:28Z" | java | "2021-02-18T15:27:37Z" | dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.service.process;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_EMPTY_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID;
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID;
import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS;
import static java.util.stream.Collectors.toSet;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.CycleDependency;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ErrorCommand;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.ProjectUser;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import com.cronutils.model.Cron;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * process related service that wraps the dao mappers used in this class.
*/
@Component
public class ProcessService {
private final Logger logger = LoggerFactory.getLogger(getClass());
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@Autowired
private UserMapper userMapper;
@Autowired
private ProcessDefinitionMapper processDefineMapper;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private DataSourceMapper dataSourceMapper;
@Autowired
private ProcessInstanceMapMapper processInstanceMapMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private UdfFuncMapper udfFuncMapper;
@Autowired
private ResourceMapper resourceMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TenantMapper tenantMapper;
@Autowired
private ProjectMapper projectMapper;
/**
* handle Command (construct ProcessInstance from Command) , wrapped in transaction
*
* @param logger logger
* @param host host
* @param validThreadNum validThreadNum
* @param command found command
* @return process instance
*/
@Transactional(rollbackFor = Exception.class)
public ProcessInstance handleCommand(Logger logger, String host, int validThreadNum, Command command) {
ProcessInstance processInstance = constructProcessInstance(command, host);
// cannot construct process instance, return null
if (processInstance == null) {
logger.error("scan command, command parameter is error: {}", command);
moveToErrorCommand(command, "process instance is null");
return null;
}
if (!checkThreadNum(command, validThreadNum)) {
logger.info("there is not enough thread for this command: {}", command);
return setWaitingThreadProcess(command, processInstance);
}
processInstance.setCommandType(command.getCommandType());
processInstance.addHistoryCmd(command.getCommandType());
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
delCommandById(command.getId());
return processInstance;
}
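    /**
     * illustrative sketch, not part of the original class: shows how a
     * master-side caller might pull one command and turn it into a process
     * instance. The host string and thread count are assumptions.
     */
    private ProcessInstance exampleHandleOneCommand() {
        Command command = findOneCommand();
        if (command == null) {
            return null;
        }
        return handleCommand(logger, "127.0.0.1:5678", 100, command);
    }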
/**
* save error command, and delete original command
*
* @param command command
* @param message message
*/
@Transactional(rollbackFor = Exception.class)
public void moveToErrorCommand(Command command, String message) {
ErrorCommand errorCommand = new ErrorCommand(command, message);
this.errorCommandMapper.insert(errorCommand);
delCommandById(command.getId());
}
/**
* set process waiting thread
*
* @param command command
* @param processInstance processInstance
* @return process instance
*/
private ProcessInstance setWaitingThreadProcess(Command command, ProcessInstance processInstance) {
processInstance.setState(ExecutionStatus.WAITTING_THREAD);
if (command.getCommandType() != CommandType.RECOVER_WAITTING_THREAD) {
processInstance.addHistoryCmd(command.getCommandType());
}
saveProcessInstance(processInstance);
this.setSubProcessParam(processInstance);
createRecoveryWaitingThreadCommand(command, processInstance);
return null;
}
/**
* check thread num
*
* @param command command
* @param validThreadNum validThreadNum
* @return if thread is enough
*/
private boolean checkThreadNum(Command command, int validThreadNum) {
int commandThreadCount = this.workProcessThreadNumCount(command.getProcessDefinitionId());
return validThreadNum >= commandThreadCount;
}
/**
* insert one command
*
* @param command command
* @return create result
*/
public int createCommand(Command command) {
int result = 0;
if (command != null) {
result = commandMapper.insert(command);
}
return result;
}
/**
* find one command from queue list
*
* @return command
*/
public Command findOneCommand() {
return commandMapper.getOneToRun();
}
/**
* check the input command exists in queue list
*
* @param command command
* @return create command result
*/
public Boolean verifyIsNeedCreateCommand(Command command) {
Boolean isNeedCreate = true;
EnumMap<CommandType, Integer> cmdTypeMap = new EnumMap<>(CommandType.class);
cmdTypeMap.put(CommandType.REPEAT_RUNNING, 1);
cmdTypeMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, 1);
cmdTypeMap.put(CommandType.START_FAILURE_TASK_PROCESS, 1);
CommandType commandType = command.getCommandType();
if (cmdTypeMap.containsKey(commandType)) {
ObjectNode cmdParamObj = JSONUtils.parseObject(command.getCommandParam());
int processInstanceId = cmdParamObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt();
List<Command> commands = commandMapper.selectList(null);
// for all commands
for (Command tmpCommand : commands) {
if (cmdTypeMap.containsKey(tmpCommand.getCommandType())) {
ObjectNode tempObj = JSONUtils.parseObject(tmpCommand.getCommandParam());
if (tempObj != null && processInstanceId == tempObj.path(CMD_PARAM_RECOVER_PROCESS_ID_STRING).asInt()) {
isNeedCreate = false;
break;
}
}
}
}
return isNeedCreate;
}
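    /**
     * illustrative sketch, not part of the original class: a command that
     * recovers process instance 123 (the id is an assumption) only needs to
     * be created when no queued command carries the same recover id.
     */
    private boolean exampleVerifyRecoverCommandIsDeduplicated() {
        Command command = new Command();
        command.setCommandType(CommandType.REPEAT_RUNNING);
        command.setCommandParam("{\"" + CMD_PARAM_RECOVER_PROCESS_ID_STRING + "\":\"123\"}");
        return verifyIsNeedCreateCommand(command);
    }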
/**
* find process instance detail by id
*
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceDetailById(int processId) {
return processInstanceMapper.queryDetailById(processId);
}
/**
* get task node list by definitionId
*/
public List<TaskNode> getTaskNodeListByDefinitionId(Integer defineId) {
ProcessDefinition processDefinition = processDefineMapper.selectById(defineId);
if (processDefinition == null) {
logger.info("process define not exists");
return null;
}
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
//process data check
if (null == processData) {
logger.error("process data is null");
return new ArrayList<>();
}
return processData.getTasks();
}
/**
* find process instance by id
*
* @param processId processId
* @return process instance
*/
public ProcessInstance findProcessInstanceById(int processId) {
return processInstanceMapper.selectById(processId);
}
/**
* find process define by id.
*
* @param processDefinitionId processDefinitionId
* @return process definition
*/
public ProcessDefinition findProcessDefineById(int processDefinitionId) {
return processDefineMapper.selectById(processDefinitionId);
}
/**
* delete work process instance by id
*
* @param processInstanceId processInstanceId
* @return delete process instance result
*/
public int deleteWorkProcessInstanceById(int processInstanceId) {
return processInstanceMapper.deleteById(processInstanceId);
}
/**
* delete all sub process by parent instance id
*
* @param processInstanceId processInstanceId
* @return delete all sub process instance result
*/
public int deleteAllSubWorkProcessByParentId(int processInstanceId) {
List<Integer> subProcessIdList = processInstanceMapMapper.querySubIdListByParentId(processInstanceId);
for (Integer subId : subProcessIdList) {
deleteAllSubWorkProcessByParentId(subId);
deleteWorkProcessMapByParentId(subId);
removeTaskLogFile(subId);
deleteWorkProcessInstanceById(subId);
}
return 1;
}
/**
* remove task log file
*
* @param processInstanceId processInstanceId
*/
public void removeTaskLogFile(Integer processInstanceId) {
LogClientService logClient = null;
try {
logClient = new LogClientService();
List<TaskInstance> taskInstanceList = findValidTaskListByProcessId(processInstanceId);
if (CollectionUtils.isEmpty(taskInstanceList)) {
return;
}
for (TaskInstance taskInstance : taskInstanceList) {
String taskLogPath = taskInstance.getLogPath();
if (StringUtils.isEmpty(taskInstance.getHost())) {
continue;
}
int port = Constants.RPC_PORT;
String ip = "";
try {
ip = Host.of(taskInstance.getHost()).getIp();
} catch (Exception e) {
                    // compatible with old versions
ip = taskInstance.getHost();
}
                // remove task log from the logger server
logClient.removeTaskLog(ip, port, taskLogPath);
}
} finally {
if (logClient != null) {
logClient.close();
}
}
}
/**
* calculate sub process number in the process define.
*
* @param processDefinitionId processDefinitionId
* @return process thread num count
*/
private Integer workProcessThreadNumCount(Integer processDefinitionId) {
List<Integer> ids = new ArrayList<>();
recurseFindSubProcessId(processDefinitionId, ids);
return ids.size() + 1;
}
/**
* recursive query sub process definition id by parent id.
*
* @param parentId parentId
* @param ids ids
*/
public void recurseFindSubProcessId(int parentId, List<Integer> ids) {
ProcessDefinition processDefinition = processDefineMapper.selectById(parentId);
String processDefinitionJson = processDefinition.getProcessDefinitionJson();
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
List<TaskNode> taskNodeList = processData.getTasks();
if (taskNodeList != null && !taskNodeList.isEmpty()) {
for (TaskNode taskNode : taskNodeList) {
String parameter = taskNode.getParams();
ObjectNode parameterJson = JSONUtils.parseObject(parameter);
if (parameterJson.get(CMD_PARAM_SUB_PROCESS_DEFINE_ID) != null) {
SubProcessParameters subProcessParam = JSONUtils.parseObject(parameter, SubProcessParameters.class);
ids.add(subProcessParam.getProcessDefinitionId());
recurseFindSubProcessId(subProcessParam.getProcessDefinitionId(), ids);
}
}
}
}
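    /**
     * illustrative sketch, not part of the original class: demonstrates the
     * thread-count rule used above, i.e. one thread per discovered sub
     * process definition plus one for the definition itself (definition id
     * 10 is an assumption).
     */
    private int exampleCountThreadsForDefinition() {
        List<Integer> subProcessIds = new ArrayList<>();
        recurseFindSubProcessId(10, subProcessIds);
        return subProcessIds.size() + 1;
    }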
/**
* create recovery waiting thread command when thread pool is not enough for the process instance.
* sub work process instance need not to create recovery command.
* create recovery waiting thread command and delete origin command at the same time.
* if the recovery command is exists, only update the field update_time
*
* @param originCommand originCommand
* @param processInstance processInstance
*/
public void createRecoveryWaitingThreadCommand(Command originCommand, ProcessInstance processInstance) {
        // sub process does not need to create a wait command
if (processInstance.getIsSubProcess() == Flag.YES) {
if (originCommand != null) {
commandMapper.deleteById(originCommand.getId());
}
return;
}
Map<String, String> cmdParam = new HashMap<>();
cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, String.valueOf(processInstance.getId()));
// process instance quit by "waiting thread" state
if (originCommand == null) {
Command command = new Command(
CommandType.RECOVER_WAITTING_THREAD,
processInstance.getTaskDependType(),
processInstance.getFailureStrategy(),
processInstance.getExecutorId(),
processInstance.getProcessDefinitionId(),
JSONUtils.toJsonString(cmdParam),
processInstance.getWarningType(),
processInstance.getWarningGroupId(),
processInstance.getScheduleTime(),
processInstance.getWorkerGroup(),
processInstance.getProcessInstancePriority()
);
saveCommand(command);
return;
}
        // only update the command time if the current command is a recover-from-waiting-thread command
if (originCommand.getCommandType() == CommandType.RECOVER_WAITTING_THREAD) {
originCommand.setUpdateTime(new Date());
saveCommand(originCommand);
} else {
// delete old command and create new waiting thread command
commandMapper.deleteById(originCommand.getId());
originCommand.setId(0);
originCommand.setCommandType(CommandType.RECOVER_WAITTING_THREAD);
originCommand.setUpdateTime(new Date());
originCommand.setCommandParam(JSONUtils.toJsonString(cmdParam));
originCommand.setProcessInstancePriority(processInstance.getProcessInstancePriority());
saveCommand(originCommand);
}
}
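    /**
     * illustrative sketch, not part of the original class: the recovery
     * waiting thread command carries only the waiting process instance id
     * (42 is an assumption) as its command parameter.
     */
    private String exampleRecoveryWaitingThreadParam() {
        Map<String, String> cmdParam = new HashMap<>();
        cmdParam.put(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD, "42");
        return JSONUtils.toJsonString(cmdParam);
    }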
/**
* get schedule time from command
*
* @param command command
* @param cmdParam cmdParam map
* @return date
*/
private Date getScheduleTime(Command command, Map<String, String> cmdParam) {
Date scheduleTime = command.getScheduleTime();
if (scheduleTime == null && cmdParam != null && cmdParam.containsKey(CMDPARAM_COMPLEMENT_DATA_START_DATE)) {
scheduleTime = DateUtils.stringToDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
}
return scheduleTime;
}
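    /**
     * illustrative sketch, not part of the original class: assuming the
     * given command carries no schedule time of its own, the complement-data
     * start date in the command parameters is used as the fallback.
     */
    private Date exampleScheduleTimeFallback(Command command) {
        Map<String, String> cmdParam = new HashMap<>();
        cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, "2020-01-01 00:00:00");
        return getScheduleTime(command, cmdParam);
    }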
/**
* generate a new work process instance from command.
*
* @param processDefinition processDefinition
* @param command command
* @param cmdParam cmdParam map
* @return process instance
*/
private ProcessInstance generateNewProcessInstance(ProcessDefinition processDefinition,
Command command,
Map<String, String> cmdParam) {
ProcessInstance processInstance = new ProcessInstance(processDefinition);
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
processInstance.setRecovery(Flag.NO);
processInstance.setStartTime(new Date());
processInstance.setRunTimes(1);
processInstance.setMaxTryTimes(0);
processInstance.setProcessDefinitionId(command.getProcessDefinitionId());
processInstance.setCommandParam(command.getCommandParam());
processInstance.setCommandType(command.getCommandType());
processInstance.setIsSubProcess(Flag.NO);
processInstance.setTaskDependType(command.getTaskDependType());
processInstance.setFailureStrategy(command.getFailureStrategy());
processInstance.setExecutorId(command.getExecutorId());
WarningType warningType = command.getWarningType() == null ? WarningType.NONE : command.getWarningType();
processInstance.setWarningType(warningType);
Integer warningGroupId = command.getWarningGroupId() == null ? 0 : command.getWarningGroupId();
processInstance.setWarningGroupId(warningGroupId);
// schedule time
Date scheduleTime = getScheduleTime(command, cmdParam);
if (scheduleTime != null) {
processInstance.setScheduleTime(scheduleTime);
}
processInstance.setCommandStartTime(command.getStartTime());
processInstance.setLocations(processDefinition.getLocations());
processInstance.setConnects(processDefinition.getConnects());
// get start params from command param
Map<String, String> startParamMap = null;
if (cmdParam != null && cmdParam.containsKey(Constants.CMD_PARAM_START_PARAMS)) {
String startParamJson = cmdParam.get(Constants.CMD_PARAM_START_PARAMS);
startParamMap = JSONUtils.toMap(startParamJson);
}
// set start param into global params
if (startParamMap != null && startParamMap.size() > 0
&& processDefinition.getGlobalParamMap() != null) {
for (Map.Entry<String, String> param : processDefinition.getGlobalParamMap().entrySet()) {
String val = startParamMap.get(param.getKey());
if (val != null) {
param.setValue(val);
}
}
}
// curing global params
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
//copy process define json to process instance
processInstance.setProcessInstanceJson(processDefinition.getProcessDefinitionJson());
// set process instance priority
processInstance.setProcessInstancePriority(command.getProcessInstancePriority());
String workerGroup = StringUtils.isBlank(command.getWorkerGroup()) ? Constants.DEFAULT_WORKER_GROUP : command.getWorkerGroup();
processInstance.setWorkerGroup(workerGroup);
processInstance.setTimeout(processDefinition.getTimeout());
processInstance.setTenantId(processDefinition.getTenantId());
return processInstance;
}
/**
* get process tenant
     * if there is a tenant id in the definition, use the tenant of the definition.
     * if there is no tenant id in the definition, or the tenant does not exist,
     * use the definition creator's tenant.
*
* @param tenantId tenantId
* @param userId userId
* @return tenant
*/
public Tenant getTenantForProcess(int tenantId, int userId) {
Tenant tenant = null;
if (tenantId >= 0) {
tenant = tenantMapper.queryById(tenantId);
}
if (userId == 0) {
return null;
}
if (tenant == null) {
User user = userMapper.selectById(userId);
tenant = tenantMapper.queryById(user.getTenantId());
}
return tenant;
}
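    /**
     * illustrative sketch, not part of the original class: a definition with
     * a negative tenant id falls back to the tenant of the given user
     * (assuming a user with id 1 exists); a userId of 0 yields null instead.
     */
    private Tenant exampleTenantFallback() {
        return getTenantForProcess(-1, 1);
    }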
/**
* check command parameters is valid
*
* @param command command
* @param cmdParam cmdParam map
* @return whether command param is valid
*/
private Boolean checkCmdParam(Command command, Map<String, String> cmdParam) {
if (command.getTaskDependType() == TaskDependType.TASK_ONLY || command.getTaskDependType() == TaskDependType.TASK_PRE) {
if (cmdParam == null
|| !cmdParam.containsKey(Constants.CMD_PARAM_START_NODE_NAMES)
|| cmdParam.get(Constants.CMD_PARAM_START_NODE_NAMES).isEmpty()) {
logger.error("command node depend type is {}, but start nodes is null ", command.getTaskDependType());
return false;
}
}
return true;
}
/**
* construct process instance according to one command.
*
* @param command command
* @param host host
* @return process instance
*/
private ProcessInstance constructProcessInstance(Command command, String host) {
ProcessInstance processInstance = null;
CommandType commandType = command.getCommandType();
Map<String, String> cmdParam = JSONUtils.toMap(command.getCommandParam());
ProcessDefinition processDefinition = null;
if (command.getProcessDefinitionId() != 0) {
processDefinition = processDefineMapper.selectById(command.getProcessDefinitionId());
if (processDefinition == null) {
logger.error("cannot find the work process define! define id : {}", command.getProcessDefinitionId());
return null;
}
}
if (cmdParam != null) {
Integer processInstanceId = 0;
// recover from failure or pause tasks
if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING)) {
String processId = cmdParam.get(Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING);
processInstanceId = Integer.parseInt(processId);
if (processInstanceId == 0) {
logger.error("command parameter is error, [ ProcessInstanceId ] is 0");
return null;
}
} else if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) {
// sub process map
String pId = cmdParam.get(Constants.CMD_PARAM_SUB_PROCESS);
processInstanceId = Integer.parseInt(pId);
} else if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD)) {
// waiting thread command
String pId = cmdParam.get(Constants.CMD_PARAM_RECOVERY_WAITING_THREAD);
processInstanceId = Integer.parseInt(pId);
}
if (processInstanceId == 0) {
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
} else {
processInstance = this.findProcessInstanceDetailById(processInstanceId);
// Recalculate global parameters after rerun.
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
getCommandTypeIfComplement(processInstance, command),
processInstance.getScheduleTime()));
}
processDefinition = processDefineMapper.selectById(processInstance.getProcessDefinitionId());
processInstance.setProcessDefinition(processDefinition);
//reset command parameter
if (processInstance.getCommandParam() != null) {
Map<String, String> processCmdParam = JSONUtils.toMap(processInstance.getCommandParam());
for (Map.Entry<String, String> entry : processCmdParam.entrySet()) {
if (!cmdParam.containsKey(entry.getKey())) {
cmdParam.put(entry.getKey(), entry.getValue());
}
}
}
// reset command parameter if sub process
if (cmdParam.containsKey(Constants.CMD_PARAM_SUB_PROCESS)) {
processInstance.setCommandParam(command.getCommandParam());
}
} else {
// generate one new process instance
processInstance = generateNewProcessInstance(processDefinition, command, cmdParam);
}
if (Boolean.FALSE.equals(checkCmdParam(command, cmdParam))) {
logger.error("command parameter check failed!");
return null;
}
if (command.getScheduleTime() != null) {
processInstance.setScheduleTime(command.getScheduleTime());
}
processInstance.setHost(host);
ExecutionStatus runStatus = ExecutionStatus.RUNNING_EXECUTION;
int runTime = processInstance.getRunTimes();
switch (commandType) {
case START_PROCESS:
break;
case START_FAILURE_TASK_PROCESS:
// find failed tasks and init these tasks
List<Integer> failedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.FAILURE);
List<Integer> toleranceList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.NEED_FAULT_TOLERANCE);
List<Integer> killedList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.KILL);
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
failedList.addAll(killedList);
failedList.addAll(toleranceList);
for (Integer taskId : failedList) {
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING,
String.join(Constants.COMMA, convertIntListToString(failedList)));
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
processInstance.setRunTimes(runTime + 1);
break;
case START_CURRENT_TASK_PROCESS:
break;
case RECOVER_WAITTING_THREAD:
break;
case RECOVER_SUSPENDED_PROCESS:
// find pause tasks and init task's state
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
List<Integer> suspendedNodeList = this.findTaskIdByInstanceState(processInstance.getId(), ExecutionStatus.PAUSE);
List<Integer> stopNodeList = findTaskIdByInstanceState(processInstance.getId(),
ExecutionStatus.KILL);
suspendedNodeList.addAll(stopNodeList);
for (Integer taskId : suspendedNodeList) {
// initialize the pause state
initTaskInstance(this.findTaskInstanceById(taskId));
}
cmdParam.put(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING, String.join(",", convertIntListToString(suspendedNodeList)));
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
processInstance.setRunTimes(runTime + 1);
break;
case RECOVER_TOLERANCE_FAULT_PROCESS:
// recover tolerance fault process
processInstance.setRecovery(Flag.YES);
runStatus = processInstance.getState();
break;
case COMPLEMENT_DATA:
// delete all the valid tasks when complement data
List<TaskInstance> taskInstanceList = this.findValidTaskListByProcessId(processInstance.getId());
for (TaskInstance taskInstance : taskInstanceList) {
taskInstance.setFlag(Flag.NO);
this.updateTaskInstance(taskInstance);
}
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case REPEAT_RUNNING:
// delete the recover task names from command parameter
if (cmdParam.containsKey(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING)) {
cmdParam.remove(Constants.CMD_PARAM_RECOVERY_START_NODE_STRING);
processInstance.setCommandParam(JSONUtils.toJsonString(cmdParam));
}
// delete all the valid tasks when repeat running
List<TaskInstance> validTaskList = findValidTaskListByProcessId(processInstance.getId());
for (TaskInstance taskInstance : validTaskList) {
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
}
processInstance.setStartTime(new Date());
processInstance.setEndTime(null);
processInstance.setRunTimes(runTime + 1);
initComplementDataParam(processDefinition, processInstance, cmdParam);
break;
case SCHEDULER:
break;
default:
break;
}
processInstance.setState(runStatus);
return processInstance;
}
/**
     * return the complement data command type if the process starts with complement data
*
* @param processInstance processInstance
* @param command command
* @return command type
*/
private CommandType getCommandTypeIfComplement(ProcessInstance processInstance, Command command) {
if (CommandType.COMPLEMENT_DATA == processInstance.getCmdTypeIfComplement()) {
return CommandType.COMPLEMENT_DATA;
} else {
return command.getCommandType();
}
}
/**
* initialize complement data parameters
*
* @param processDefinition processDefinition
* @param processInstance processInstance
* @param cmdParam cmdParam
*/
private void initComplementDataParam(ProcessDefinition processDefinition,
ProcessInstance processInstance,
Map<String, String> cmdParam) {
if (!processInstance.isComplementData()) {
return;
}
Date startComplementTime = DateUtils.parse(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE),
YYYY_MM_DD_HH_MM_SS);
if (Flag.NO == processInstance.getIsSubProcess()) {
processInstance.setScheduleTime(startComplementTime);
}
processInstance.setGlobalParams(ParameterUtils.curingGlobalParams(
processDefinition.getGlobalParamMap(),
processDefinition.getGlobalParamList(),
CommandType.COMPLEMENT_DATA, processInstance.getScheduleTime()));
}
/**
     * set sub work process parameters:
     * handle the sub work process instance, update the relation table and command parameters,
     * set the sub work process flag, and extend the parent work process command parameters.
*
* @param subProcessInstance subProcessInstance
* @return process instance
*/
public ProcessInstance setSubProcessParam(ProcessInstance subProcessInstance) {
String cmdParam = subProcessInstance.getCommandParam();
if (StringUtils.isEmpty(cmdParam)) {
return subProcessInstance;
}
Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
// write sub process id into cmd param.
if (paramMap.containsKey(CMD_PARAM_SUB_PROCESS)
&& CMD_PARAM_EMPTY_SUB_PROCESS.equals(paramMap.get(CMD_PARAM_SUB_PROCESS))) {
paramMap.remove(CMD_PARAM_SUB_PROCESS);
paramMap.put(CMD_PARAM_SUB_PROCESS, String.valueOf(subProcessInstance.getId()));
subProcessInstance.setCommandParam(JSONUtils.toJsonString(paramMap));
subProcessInstance.setIsSubProcess(Flag.YES);
this.saveProcessInstance(subProcessInstance);
}
        // copy parent instance user defined params to the sub process
String parentInstanceId = paramMap.get(CMD_PARAM_SUB_PROCESS_PARENT_INSTANCE_ID);
if (StringUtils.isNotEmpty(parentInstanceId)) {
ProcessInstance parentInstance = findProcessInstanceDetailById(Integer.parseInt(parentInstanceId));
if (parentInstance != null) {
subProcessInstance.setGlobalParams(
joinGlobalParams(parentInstance.getGlobalParams(), subProcessInstance.getGlobalParams()));
this.saveProcessInstance(subProcessInstance);
} else {
logger.error("sub process command params error, cannot find parent instance: {} ", cmdParam);
}
}
ProcessInstanceMap processInstanceMap = JSONUtils.parseObject(cmdParam, ProcessInstanceMap.class);
if (processInstanceMap == null || processInstanceMap.getParentProcessInstanceId() == 0) {
return subProcessInstance;
}
// update sub process id to process map table
processInstanceMap.setProcessInstanceId(subProcessInstance.getId());
this.updateWorkProcessInstanceMap(processInstanceMap);
return subProcessInstance;
}
/**
     * join parent global params into the sub process.
     * only the keys that are not present in the sub process global params are joined.
*
* @param parentGlobalParams parentGlobalParams
* @param subGlobalParams subGlobalParams
* @return global params join
*/
private String joinGlobalParams(String parentGlobalParams, String subGlobalParams) {
List<Property> parentPropertyList = JSONUtils.toList(parentGlobalParams, Property.class);
List<Property> subPropertyList = JSONUtils.toList(subGlobalParams, Property.class);
Map<String, String> subMap = subPropertyList.stream().collect(Collectors.toMap(Property::getProp, Property::getValue));
for (Property parent : parentPropertyList) {
if (!subMap.containsKey(parent.getProp())) {
subPropertyList.add(parent);
}
}
return JSONUtils.toJsonString(subPropertyList);
}
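    /**
     * illustrative sketch, not part of the original class, with assumed JSON
     * shapes: the parent's "p2" is appended because the sub process does not
     * define it, while the sub process keeps its own value for "p1".
     */
    private String exampleJoinGlobalParams() {
        String parent = "[{\"prop\":\"p1\",\"value\":\"parent\"},{\"prop\":\"p2\",\"value\":\"2\"}]";
        String sub = "[{\"prop\":\"p1\",\"value\":\"sub\"}]";
        return joinGlobalParams(parent, sub);
    }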
/**
* initialize task instance
*
* @param taskInstance taskInstance
*/
private void initTaskInstance(TaskInstance taskInstance) {
if (!taskInstance.isSubProcess()
&& (taskInstance.getState().typeIsCancel() || taskInstance.getState().typeIsFailure())) {
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
return;
}
taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS);
updateTaskInstance(taskInstance);
}
/**
* submit task to db
* submit sub process to command
*
* @param taskInstance taskInstance
* @return task instance
*/
@Transactional(rollbackFor = Exception.class)
public TaskInstance submitTask(TaskInstance taskInstance) {
ProcessInstance processInstance = this.findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
logger.info("start submit task : {}, instance id:{}, state: {}",
taskInstance.getName(), taskInstance.getProcessInstanceId(), processInstance.getState());
//submit to db
TaskInstance task = submitTaskInstanceToDB(taskInstance, processInstance);
if (task == null) {
logger.error("end submit task to db error, task name:{}, process id:{} state: {} ",
taskInstance.getName(), taskInstance.getProcessInstance(), processInstance.getState());
return task;
}
if (!task.getState().typeIsFinished()) {
createSubWorkProcess(processInstance, task);
}
logger.info("end submit task to db successfully:{} state:{} complete, instance id:{} state: {} ",
taskInstance.getName(), task.getState(), processInstance.getId(), processInstance.getState());
return task;
}
/**
* set work process instance map
     * consider that repeat running does not generate a new sub process instance;
* set map {parent instance id, task instance id, 0(child instance id)}
*
* @param parentInstance parentInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap setProcessInstanceMap(ProcessInstance parentInstance, TaskInstance parentTask) {
ProcessInstanceMap processMap = findWorkProcessMapByParent(parentInstance.getId(), parentTask.getId());
if (processMap != null) {
return processMap;
}
if (parentInstance.getCommandType() == CommandType.REPEAT_RUNNING) {
// update current task id to map
processMap = findPreviousTaskProcessMap(parentInstance, parentTask);
if (processMap != null) {
processMap.setParentTaskInstanceId(parentTask.getId());
updateWorkProcessInstanceMap(processMap);
return processMap;
}
}
// new task
processMap = new ProcessInstanceMap();
processMap.setParentProcessInstanceId(parentInstance.getId());
processMap.setParentTaskInstanceId(parentTask.getId());
createWorkProcessInstanceMap(processMap);
return processMap;
}
/**
* find previous task work process map.
*
* @param parentProcessInstance parentProcessInstance
* @param parentTask parentTask
* @return process instance map
*/
private ProcessInstanceMap findPreviousTaskProcessMap(ProcessInstance parentProcessInstance,
TaskInstance parentTask) {
Integer preTaskId = 0;
List<TaskInstance> preTaskList = this.findPreviousTaskListByWorkProcessId(parentProcessInstance.getId());
for (TaskInstance task : preTaskList) {
if (task.getName().equals(parentTask.getName())) {
preTaskId = task.getId();
ProcessInstanceMap map = findWorkProcessMapByParent(parentProcessInstance.getId(), preTaskId);
if (map != null) {
return map;
}
}
}
logger.info("sub process instance is not found,parent task:{},parent instance:{}",
parentTask.getId(), parentProcessInstance.getId());
return null;
}
/**
* create sub work process command
*
* @param parentProcessInstance parentProcessInstance
* @param task task
*/
public void createSubWorkProcess(ProcessInstance parentProcessInstance, TaskInstance task) {
if (!task.isSubProcess()) {
return;
}
        // check whether the sub work flow needs to be created first
ProcessInstanceMap instanceMap = findWorkProcessMapByParent(parentProcessInstance.getId(), task.getId());
if (null != instanceMap && CommandType.RECOVER_TOLERANCE_FAULT_PROCESS == parentProcessInstance.getCommandType()) {
            // fault tolerance recovery does not create a new command when the sub command has already been created
return;
}
instanceMap = setProcessInstanceMap(parentProcessInstance, task);
ProcessInstance childInstance = null;
if (instanceMap.getProcessInstanceId() != 0) {
childInstance = findProcessInstanceById(instanceMap.getProcessInstanceId());
}
Command subProcessCommand = createSubProcessCommand(parentProcessInstance, childInstance, instanceMap, task);
updateSubProcessDefinitionByParent(parentProcessInstance, subProcessCommand.getProcessDefinitionId());
initSubInstanceState(childInstance);
createCommand(subProcessCommand);
logger.info("sub process command created: {} ", subProcessCommand);
}
/**
     * complement data needs to transform the parent parameters to the child.
*/
private String getSubWorkFlowParam(ProcessInstanceMap instanceMap, ProcessInstance parentProcessInstance) {
// set sub work process command
String processMapStr = JSONUtils.toJsonString(instanceMap);
Map<String, String> cmdParam = JSONUtils.toMap(processMapStr);
if (parentProcessInstance.isComplementData()) {
Map<String, String> parentParam = JSONUtils.toMap(parentProcessInstance.getCommandParam());
String endTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE);
String startTime = parentParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, endTime);
cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, startTime);
processMapStr = JSONUtils.toJsonString(cmdParam);
}
return processMapStr;
}
/**
* create sub work process command
*/
public Command createSubProcessCommand(ProcessInstance parentProcessInstance,
ProcessInstance childInstance,
ProcessInstanceMap instanceMap,
TaskInstance task) {
CommandType commandType = getSubCommandType(parentProcessInstance, childInstance);
TaskNode taskNode = JSONUtils.parseObject(task.getTaskJson(), TaskNode.class);
Map<String, String> subProcessParam = JSONUtils.toMap(taskNode.getParams());
Integer childDefineId = Integer.parseInt(subProcessParam.get(Constants.CMD_PARAM_SUB_PROCESS_DEFINE_ID));
String processParam = getSubWorkFlowParam(instanceMap, parentProcessInstance);
return new Command(
commandType,
TaskDependType.TASK_POST,
parentProcessInstance.getFailureStrategy(),
parentProcessInstance.getExecutorId(),
childDefineId,
processParam,
parentProcessInstance.getWarningType(),
parentProcessInstance.getWarningGroupId(),
parentProcessInstance.getScheduleTime(),
task.getWorkerGroup(),
parentProcessInstance.getProcessInstancePriority()
);
}
/**
* initialize sub work flow state
* child instance state would be initialized when 'recovery from pause/stop/failure'
*/
private void initSubInstanceState(ProcessInstance childInstance) {
if (childInstance != null) {
childInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
updateProcessInstance(childInstance);
}
}
/**
* get sub work flow command type
     * if the child instance exists: child command = father command
     * if the child instance does not exist: child command = first father history command
*/
private CommandType getSubCommandType(ProcessInstance parentProcessInstance, ProcessInstance childInstance) {
CommandType commandType = parentProcessInstance.getCommandType();
if (childInstance == null) {
String fatherHistoryCommand = parentProcessInstance.getHistoryCmd();
commandType = CommandType.valueOf(fatherHistoryCommand.split(Constants.COMMA)[0]);
}
return commandType;
}
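    /**
     * illustrative sketch, not part of the original class: when no child
     * instance exists yet, the first entry of the parent's comma separated
     * history command decides the sub command type.
     */
    private CommandType exampleSubCommandTypeForNewChild(ProcessInstance parentProcessInstance) {
        return getSubCommandType(parentProcessInstance, null);
    }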
/**
* update sub process definition
*
* @param parentProcessInstance parentProcessInstance
* @param childDefinitionId childDefinitionId
*/
private void updateSubProcessDefinitionByParent(ProcessInstance parentProcessInstance, int childDefinitionId) {
ProcessDefinition fatherDefinition = this.findProcessDefineById(parentProcessInstance.getProcessDefinitionId());
ProcessDefinition childDefinition = this.findProcessDefineById(childDefinitionId);
if (childDefinition != null && fatherDefinition != null) {
childDefinition.setWarningGroupId(fatherDefinition.getWarningGroupId());
processDefineMapper.updateById(childDefinition);
}
}
/**
     * submit task to the database
*
* @param taskInstance taskInstance
* @param processInstance processInstance
* @return task instance
*/
public TaskInstance submitTaskInstanceToDB(TaskInstance taskInstance, ProcessInstance processInstance) {
ExecutionStatus processInstanceState = processInstance.getState();
if (taskInstance.getState().typeIsFailure()) {
if (taskInstance.isSubProcess()) {
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1);
} else {
if (processInstanceState != ExecutionStatus.READY_STOP
&& processInstanceState != ExecutionStatus.READY_PAUSE) {
// failure task set invalid
taskInstance.setFlag(Flag.NO);
updateTaskInstance(taskInstance);
                    // create new task instance
if (taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE) {
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1);
}
taskInstance.setSubmitTime(null);
taskInstance.setStartTime(null);
taskInstance.setEndTime(null);
taskInstance.setFlag(Flag.YES);
taskInstance.setHost(null);
taskInstance.setId(0);
}
}
}
taskInstance.setExecutorId(processInstance.getExecutorId());
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority());
taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState));
if (taskInstance.getSubmitTime() == null) {
taskInstance.setSubmitTime(new Date());
}
if (taskInstance.getFirstSubmitTime() == null) {
taskInstance.setFirstSubmitTime(taskInstance.getSubmitTime());
}
boolean saveResult = saveTaskInstance(taskInstance);
if (!saveResult) {
return null;
}
return taskInstance;
}
/**
* get submit task instance state by the work process state
     * the task state cannot be modified when it is running/killed/submitted successfully,
     * because the task instance already exists in the task queue.
* return pause if work process state is ready pause
* return stop if work process state is ready stop
* if all of above are not satisfied, return submit success
*
* @param taskInstance taskInstance
* @param processInstanceState processInstanceState
* @return process instance state
*/
public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState) {
ExecutionStatus state = taskInstance.getState();
if (
// running, delayed or killed
// the task already exists in task queue
// return state
state == ExecutionStatus.RUNNING_EXECUTION
|| state == ExecutionStatus.DELAY_EXECUTION
|| state == ExecutionStatus.KILL
) {
return state;
}
        // return pause/stop if the process instance state is ready pause/stop,
        // otherwise return submit success
if (processInstanceState == ExecutionStatus.READY_PAUSE) {
state = ExecutionStatus.PAUSE;
} else if (processInstanceState == ExecutionStatus.READY_STOP
|| !checkProcessStrategy(taskInstance)) {
state = ExecutionStatus.KILL;
} else {
state = ExecutionStatus.SUBMITTED_SUCCESS;
}
return state;
}
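    /**
     * illustrative sketch, not part of the original class: a submittable task
     * under a READY_PAUSE process instance is submitted in PAUSE state, while
     * a task already in RUNNING_EXECUTION keeps its current state.
     */
    private ExecutionStatus exampleSubmitStateUnderReadyPause(TaskInstance taskInstance) {
        return getSubmitTaskState(taskInstance, ExecutionStatus.READY_PAUSE);
    }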
/**
* check process instance strategy
*
* @param taskInstance taskInstance
* @return check strategy result
*/
private boolean checkProcessStrategy(TaskInstance taskInstance) {
ProcessInstance processInstance = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
FailureStrategy failureStrategy = processInstance.getFailureStrategy();
if (failureStrategy == FailureStrategy.CONTINUE) {
return true;
}
List<TaskInstance> taskInstances = this.findValidTaskListByProcessId(taskInstance.getProcessInstanceId());
for (TaskInstance task : taskInstances) {
if (task.getState() == ExecutionStatus.FAILURE) {
return false;
}
}
return true;
}
/**
* create a new process instance
*
* @param processInstance processInstance
*/
public void createProcessInstance(ProcessInstance processInstance) {
if (processInstance != null) {
processInstanceMapper.insert(processInstance);
}
}
/**
* insert or update work process instance to data base
*
* @param processInstance processInstance
*/
public void saveProcessInstance(ProcessInstance processInstance) {
if (processInstance == null) {
logger.error("save error, process instance is null!");
return;
}
if (processInstance.getId() != 0) {
processInstanceMapper.updateById(processInstance);
} else {
createProcessInstance(processInstance);
}
}
/**
* insert or update command
*
* @param command command
* @return save command result
*/
public int saveCommand(Command command) {
if (command.getId() != 0) {
return commandMapper.updateById(command);
} else {
return commandMapper.insert(command);
}
}
/**
* insert or update task instance
*
* @param taskInstance taskInstance
* @return save task instance result
*/
public boolean saveTaskInstance(TaskInstance taskInstance) {
if (taskInstance.getId() != 0) {
return updateTaskInstance(taskInstance);
} else {
return createTaskInstance(taskInstance);
}
}
/**
* insert task instance
*
* @param taskInstance taskInstance
* @return create task instance result
*/
public boolean createTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.insert(taskInstance);
return count > 0;
}
/**
* update task instance
*
* @param taskInstance taskInstance
* @return update task instance result
*/
public boolean updateTaskInstance(TaskInstance taskInstance) {
int count = taskInstanceMapper.updateById(taskInstance);
return count > 0;
}
/**
* delete a command by id
*
* @param id id
*/
public void delCommandById(int id) {
commandMapper.deleteById(id);
}
/**
* find task instance by id
*
* @param taskId task id
     * @return task instance
*/
public TaskInstance findTaskInstanceById(Integer taskId) {
return taskInstanceMapper.selectById(taskId);
}
/**
     * package task instance, associate processInstance and processDefine
*
* @param taskInstId taskInstId
* @return task instance
*/
public TaskInstance getTaskInstanceDetailByTaskId(int taskInstId) {
// get task instance
TaskInstance taskInstance = findTaskInstanceById(taskInstId);
if (taskInstance == null) {
return taskInstance;
}
// get process instance
ProcessInstance processInstance = findProcessInstanceDetailById(taskInstance.getProcessInstanceId());
// get process define
ProcessDefinition processDefine = findProcessDefineById(taskInstance.getProcessDefinitionId());
taskInstance.setProcessInstance(processInstance);
taskInstance.setProcessDefine(processDefine);
return taskInstance;
}
/**
* get id list by task state
*
* @param instanceId instanceId
* @param state state
* @return task instance states
*/
public List<Integer> findTaskIdByInstanceState(int instanceId, ExecutionStatus state) {
return taskInstanceMapper.queryTaskByProcessIdAndState(instanceId, state.ordinal());
}
/**
* find valid task list by process definition id
*
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findValidTaskListByProcessId(Integer processInstanceId) {
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.YES);
}
/**
* find previous task list by work process id
*
* @param processInstanceId processInstanceId
* @return task instance list
*/
public List<TaskInstance> findPreviousTaskListByWorkProcessId(Integer processInstanceId) {
return taskInstanceMapper.findValidTaskListByProcessId(processInstanceId, Flag.NO);
}
/**
* update work process instance map
*
* @param processInstanceMap processInstanceMap
* @return update process instance result
*/
public int updateWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) {
return processInstanceMapMapper.updateById(processInstanceMap);
}
/**
* create work process instance map
*
* @param processInstanceMap processInstanceMap
* @return create process instance result
*/
public int createWorkProcessInstanceMap(ProcessInstanceMap processInstanceMap) {
if (processInstanceMap != null) {
return processInstanceMapMapper.insert(processInstanceMap);
}
return 0;
}
/**
* find work process map by parent process id and parent task id.
*
* @param parentWorkProcessId parentWorkProcessId
* @param parentTaskId parentTaskId
* @return process instance map
*/
public ProcessInstanceMap findWorkProcessMapByParent(Integer parentWorkProcessId, Integer parentTaskId) {
return processInstanceMapMapper.queryByParentId(parentWorkProcessId, parentTaskId);
}
/**
* delete work process map by parent process id
*
* @param parentWorkProcessId parentWorkProcessId
* @return delete process map result
*/
public int deleteWorkProcessMapByParentId(int parentWorkProcessId) {
return processInstanceMapMapper.deleteByParentProcessId(parentWorkProcessId);
}
/**
* find sub process instance
*
* @param parentProcessId parentProcessId
* @param parentTaskId parentTaskId
* @return process instance
*/
public ProcessInstance findSubProcessInstance(Integer parentProcessId, Integer parentTaskId) {
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryByParentId(parentProcessId, parentTaskId);
if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) {
return null;
}
return findProcessInstanceById(processInstanceMap.getProcessInstanceId());
}
/**
* find parent process instance
*
* @param subProcessId subProcessId
* @return process instance
*/
public ProcessInstance findParentProcessInstance(Integer subProcessId) {
ProcessInstanceMap processInstanceMap = processInstanceMapMapper.queryBySubProcessId(subProcessId);
if (processInstanceMap == null || processInstanceMap.getProcessInstanceId() == 0) {
return null;
}
return findProcessInstanceById(processInstanceMap.getParentProcessInstanceId());
}
/**
* change task state
*
* @param state state
* @param startTime startTime
* @param host host
* @param executePath executePath
* @param logPath logPath
* @param taskInstId taskInstId
*/
public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state, Date startTime, String host,
String executePath,
String logPath,
int taskInstId) {
taskInstance.setState(state);
taskInstance.setStartTime(startTime);
taskInstance.setHost(host);
taskInstance.setExecutePath(executePath);
taskInstance.setLogPath(logPath);
saveTaskInstance(taskInstance);
}
/**
* update process instance
*
* @param processInstance processInstance
* @return update process instance result
*/
public int updateProcessInstance(ProcessInstance processInstance) {
return processInstanceMapper.updateById(processInstance);
}
/**
* update the process instance
*
* @param processInstanceId processInstanceId
* @param processJson processJson
* @param globalParams globalParams
* @param scheduleTime scheduleTime
* @param flag flag
* @param locations locations
* @param connects connects
* @return update process instance result
*/
public int updateProcessInstance(Integer processInstanceId, String processJson,
String globalParams, Date scheduleTime, Flag flag,
String locations, String connects) {
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId);
if (processInstance != null) {
processInstance.setProcessInstanceJson(processJson);
processInstance.setGlobalParams(globalParams);
processInstance.setScheduleTime(scheduleTime);
processInstance.setLocations(locations);
processInstance.setConnects(connects);
return processInstanceMapper.updateById(processInstance);
}
return 0;
}
/**
* change task state
*
* @param state state
* @param endTime endTime
* @param taskInstId taskInstId
* @param varPool varPool
*/
public void changeTaskState(TaskInstance taskInstance, ExecutionStatus state,
Date endTime,
int processId,
String appIds,
int taskInstId,
String varPool) {
taskInstance.setPid(processId);
taskInstance.setAppLink(appIds);
taskInstance.setState(state);
taskInstance.setEndTime(endTime);
taskInstance.setVarPool(varPool);
saveTaskInstance(taskInstance);
}
/**
* convert integer list to string list
*
* @param intList intList
* @return string list
*/
public List<String> convertIntListToString(List<Integer> intList) {
if (intList == null) {
return new ArrayList<>();
}
List<String> result = new ArrayList<>(intList.size());
for (Integer intVar : intList) {
result.add(String.valueOf(intVar));
}
return result;
}
/**
* query schedule by id
*
* @param id id
* @return schedule
*/
public Schedule querySchedule(int id) {
return scheduleMapper.selectById(id);
}
/**
* query Schedule by processDefinitionId
*
* @param processDefinitionId processDefinitionId
* @see Schedule
*/
public List<Schedule> queryReleaseSchedulerListByProcessDefinitionId(int processDefinitionId) {
return scheduleMapper.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
}
/**
* query need failover process instance
*
* @param host host
* @return process instance list
*/
public List<ProcessInstance> queryNeedFailoverProcessInstances(String host) {
return processInstanceMapper.queryByHostAndStatus(host, stateArray);
}
/**
* process need failover process instance
*
* @param processInstance processInstance
*/
@Transactional(rollbackFor = RuntimeException.class)
public void processNeedFailoverProcessInstances(ProcessInstance processInstance) {
//1 update processInstance host is null
processInstance.setHost(Constants.NULL);
processInstanceMapper.updateById(processInstance);
//2 insert into recover command
Command cmd = new Command();
cmd.setProcessDefinitionId(processInstance.getProcessDefinitionId());
cmd.setCommandParam(String.format("{\"%s\":%d}", Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING, processInstance.getId()));
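// e.g. the generated command param is a small JSON snippet such as {"ProcessInstanceId":1001}
// (assuming CMD_PARAM_RECOVER_PROCESS_ID_STRING resolves to "ProcessInstanceId"; see Constants)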
cmd.setExecutorId(processInstance.getExecutorId());
cmd.setCommandType(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS);
createCommand(cmd);
}
/**
* query all need failover task instances by host
*
* @param host host
* @return task instance list
*/
public List<TaskInstance> queryNeedFailoverTaskInstances(String host) {
return taskInstanceMapper.queryByHostAndStatus(host,
stateArray);
}
/**
* find data source by id
*
* @param id id
* @return datasource
*/
public DataSource findDataSourceById(int id) {
return dataSourceMapper.selectById(id);
}
/**
* update process instance state by id
*
* @param processInstanceId processInstanceId
* @param executionStatus executionStatus
* @return update process result
*/
public int updateProcessInstanceState(Integer processInstanceId, ExecutionStatus executionStatus) {
ProcessInstance instance = processInstanceMapper.selectById(processInstanceId);
instance.setState(executionStatus);
return processInstanceMapper.updateById(instance);
}
/**
* find process instance by the task id
*
* @param taskId taskId
* @return process instance
*/
public ProcessInstance findProcessInstanceByTaskId(int taskId) {
TaskInstance taskInstance = taskInstanceMapper.selectById(taskId);
if (taskInstance != null) {
return processInstanceMapper.selectById(taskInstance.getProcessInstanceId());
}
return null;
}
/**
* find udf function list by id list string
*
* @param ids ids
* @return udf function list
*/
public List<UdfFunc> queryUdfFunListByIds(int[] ids) {
return udfFuncMapper.queryUdfByIdStr(ids, null);
}
/**
* find tenant code by resource name
*
* @param resName resource name
* @param resourceType resource type
* @return tenant code
*/
public String queryTenantCodeByResName(String resName, ResourceType resourceType) {
// prepend a leading slash if missing so the tenant code can still be queried for resources created by older versions
String fullName = resName.startsWith("/") ? resName : String.format("/%s", resName);
return resourceMapper.queryTenantCodeByResourceName(fullName, resourceType.ordinal());
}
/**
* find schedule list by process define id.
*
* @param ids ids
* @return schedule list
*/
public List<Schedule> selectAllByProcessDefineId(int[] ids) {
return scheduleMapper.selectAllByProcessDefineArray(
ids);
}
/**
* get dependency cycle by work process define id and scheduler fire time
*
* @param masterId masterId
* @param processDefinitionId processDefinitionId
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency
* @throws Exception if error throws Exception
*/
public CycleDependency getCycleDependency(int masterId, int processDefinitionId, Date scheduledFireTime) throws Exception {
List<CycleDependency> list = getCycleDependencies(masterId, new int[]{processDefinitionId}, scheduledFireTime);
return !list.isEmpty() ? list.get(0) : null;
}
/**
* get dependency cycle list by work process define id list and scheduler fire time
*
* @param masterId masterId
* @param ids ids
* @param scheduledFireTime the time the task schedule is expected to trigger
* @return CycleDependency list
* @throws Exception if error throws Exception
*/
public List<CycleDependency> getCycleDependencies(int masterId, int[] ids, Date scheduledFireTime) throws Exception {
List<CycleDependency> cycleDependencyList = new ArrayList<>();
if (null == ids || ids.length == 0) {
logger.warn("ids[] is empty!is invalid!");
return cycleDependencyList;
}
if (scheduledFireTime == null) {
logger.warn("scheduledFireTime is null!is invalid!");
return cycleDependencyList;
}
String strCrontab = "";
CronExpression depCronExpression;
Cron depCron;
List<Date> list;
List<Schedule> schedules = this.selectAllByProcessDefineId(ids);
// for all scheduling information
for (Schedule depSchedule : schedules) {
strCrontab = depSchedule.getCrontab();
depCronExpression = CronUtils.parse2CronExpression(strCrontab);
depCron = CronUtils.parse2Cron(strCrontab);
CycleEnum cycleEnum = CronUtils.getMiniCycle(depCron);
if (cycleEnum == null) {
logger.error("{} is not valid", strCrontab);
continue;
}
Calendar calendar = Calendar.getInstance();
switch (cycleEnum) {
case HOUR:
calendar.add(Calendar.HOUR, -25);
break;
case DAY:
case WEEK:
calendar.add(Calendar.DATE, -32);
break;
case MONTH:
calendar.add(Calendar.MONTH, -13);
break;
default:
String cycleName = cycleEnum.name();
logger.warn("Dependent process definition's cycleEnum is {},not support!!", cycleName);
continue;
}
Date start = calendar.getTime();
if (depSchedule.getProcessDefinitionId() == masterId) {
list = CronUtils.getSelfFireDateList(start, scheduledFireTime, depCronExpression);
} else {
list = CronUtils.getFireDateList(start, scheduledFireTime, depCronExpression);
}
if (!list.isEmpty()) {
start = list.get(list.size() - 1);
CycleDependency dependency = new CycleDependency(depSchedule.getProcessDefinitionId(), start, CronUtils.getExpirationTime(start, cycleEnum), cycleEnum);
cycleDependencyList.add(dependency);
}
}
return cycleDependencyList;
}
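// Illustrative usage (hypothetical caller code, not part of this class):
//
//   Date fireTime = ...; // the time the task schedule is expected to trigger
//   CycleDependency dep = processService.getCycleDependency(masterId, dependentDefinitionId, fireTime);
//   if (dep != null) {
//       // dep carries the dependent definition id, its last fire date before fireTime,
//       // the expiration time of that window, and the detected cycle (HOUR/DAY/WEEK/MONTH)
//   }
//
// "processService", "masterId" and "dependentDefinitionId" are assumed names for illustration only.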
/**
* find last scheduler process instance in the date interval
*
* @param definitionId definitionId
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastSchedulerProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastSchedulerProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last manual process instance interval
*
* @param definitionId process definition id
* @param dateInterval dateInterval
* @return process instance
*/
public ProcessInstance findLastManualProcessInterval(int definitionId, DateInterval dateInterval) {
return processInstanceMapper.queryLastManualProcess(definitionId,
dateInterval.getStartTime(),
dateInterval.getEndTime());
}
/**
* find last running process instance
*
* @param definitionId process definition id
* @param startTime start time
* @param endTime end time
* @return process instance
*/
public ProcessInstance findLastRunningProcess(int definitionId, Date startTime, Date endTime) {
return processInstanceMapper.queryLastRunningProcess(definitionId,
startTime,
endTime,
stateArray);
}
/**
* query user queue by process instance id
*
* @param processInstanceId processInstanceId
* @return queue
*/
public String queryUserQueueByProcessInstanceId(int processInstanceId) {
String queue = "";
ProcessInstance processInstance = processInstanceMapper.selectById(processInstanceId);
if (processInstance == null) {
return queue;
}
User executor = userMapper.selectById(processInstance.getExecutorId());
if (executor != null) {
queue = executor.getQueue();
}
return queue;
}
/**
* query project name and user name by processInstanceId.
*
* @param processInstanceId processInstanceId
* @return projectName and userName
*/
public ProjectUser queryProjectWithUserByProcessInstanceId(int processInstanceId) {
return projectMapper.queryProjectWithUserByProcessInstanceId(processInstanceId);
}
/**
* get task worker group
*
* @param taskInstance taskInstance
* @return workerGroupId
*/
public String getTaskWorkerGroup(TaskInstance taskInstance) {
String workerGroup = taskInstance.getWorkerGroup();
if (StringUtils.isNotBlank(workerGroup)) {
return workerGroup;
}
int processInstanceId = taskInstance.getProcessInstanceId();
ProcessInstance processInstance = findProcessInstanceById(processInstanceId);
if (processInstance != null) {
return processInstance.getWorkerGroup();
}
logger.info("task : {} will use default worker group", taskInstance.getId());
return Constants.DEFAULT_WORKER_GROUP;
}
/**
* get have perm project list
*
* @param userId userId
* @return project list
*/
public List<Project> getProjectListHavePerm(int userId) {
List<Project> createProjects = projectMapper.queryProjectCreatedByUser(userId);
List<Project> authedProjects = projectMapper.queryAuthedProjectListByUserId(userId);
if (createProjects == null) {
createProjects = new ArrayList<>();
}
if (authedProjects != null) {
createProjects.addAll(authedProjects);
}
return createProjects;
}
/**
* get have perm project ids
*
* @param userId userId
* @return project ids
*/
public List<Integer> getProjectIdListHavePerm(int userId) {
List<Integer> projectIdList = new ArrayList<>();
for (Project project : getProjectListHavePerm(userId)) {
projectIdList.add(project.getId());
}
return projectIdList;
}
/**
* list unauthorized udf function
*
* @param userId user id
* @param needChecks data source id array
* @return unauthorized udf function list
*/
public <T> List<T> listUnauthorized(int userId, T[] needChecks, AuthorizationType authorizationType) {
List<T> resultList = new ArrayList<>();
if (Objects.nonNull(needChecks) && needChecks.length > 0) {
Set<T> originResSet = new HashSet<>(Arrays.asList(needChecks));
switch (authorizationType) {
case RESOURCE_FILE_ID:
case UDF_FILE:
Set<Integer> authorizedResourceFiles = resourceMapper.listAuthorizedResourceById(userId, needChecks).stream().map(Resource::getId).collect(toSet());
originResSet.removeAll(authorizedResourceFiles);
break;
case RESOURCE_FILE_NAME:
Set<String> authorizedResources = resourceMapper.listAuthorizedResource(userId, needChecks).stream().map(Resource::getFullName).collect(toSet());
originResSet.removeAll(authorizedResources);
break;
case DATASOURCE:
Set<Integer> authorizedDatasources = dataSourceMapper.listAuthorizedDataSource(userId, needChecks).stream().map(DataSource::getId).collect(toSet());
originResSet.removeAll(authorizedDatasources);
break;
case UDF:
Set<Integer> authorizedUdfs = udfFuncMapper.listAuthorizedUdfFunc(userId, needChecks).stream().map(UdfFunc::getId).collect(toSet());
originResSet.removeAll(authorizedUdfs);
break;
default:
break;
}
resultList.addAll(originResSet);
}
return resultList;
}
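// Illustrative usage (hypothetical caller code, not part of this class): rejecting a request
// when the user is still unauthorized for some of the datasource ids it references.
//
//   Integer[] datasourceIds = {1, 2, 3};
//   List<Integer> unauthorized = processService.listUnauthorized(userId, datasourceIds, AuthorizationType.DATASOURCE);
//   if (!unauthorized.isEmpty()) {
//       // "unauthorized" holds the ids the user may not access
//   }
//
// "processService", "userId" and the ids are assumed names/values for illustration only.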
/**
* get user by user id
*
* @param userId user id
* @return User
*/
public User getUserById(int userId) {
return userMapper.selectById(userId);
}
/**
* get resource by resource id
*
* @param resourceId resource id
* @return Resource
*/
public Resource getResourceById(int resourceId) {
return resourceMapper.selectById(resourceId);
}
/**
* list resources by ids
*
* @param resIds resIds
* @return resource list
*/
public List<Resource> listResourceByIds(Integer[] resIds) {
return resourceMapper.listResourceByIds(resIds);
}
/**
* format task app id in task instance
*/
public String formatTaskAppId(TaskInstance taskInstance) {
ProcessDefinition definition = this.findProcessDefineById(taskInstance.getProcessDefinitionId());
ProcessInstance processInstanceById = this.findProcessInstanceById(taskInstance.getProcessInstanceId());
if (definition == null || processInstanceById == null) {
return "";
}
return String.format("%s_%s_%s",
definition.getId(),
processInstanceById.getId(),
taskInstance.getId());
}
/**
* solve the branch rename bug
*
* @param processData the process data being saved
* @param oldJson the previous process definition json
* @return the adjusted process definition json string
*/
public String changeJson(ProcessData processData, String oldJson) {
ProcessData oldProcessData = JSONUtils.parseObject(oldJson, ProcessData.class);
HashMap<String, String> oldNameTaskId = new HashMap<>();
List<TaskNode> oldTasks = oldProcessData.getTasks();
for (int i = 0; i < oldTasks.size(); i++) {
TaskNode taskNode = oldTasks.get(i);
String oldName = taskNode.getName();
String oldId = taskNode.getId();
oldNameTaskId.put(oldName, oldId);
}
// for the process definition json being saved this time, record each task's id -> name mapping
HashMap<String, String> newNameTaskId = new HashMap<>();
List<TaskNode> newTasks = processData.getTasks();
for (int i = 0; i < newTasks.size(); i++) {
TaskNode taskNode = newTasks.get(i);
String newId = taskNode.getId();
String newName = taskNode.getName();
newNameTaskId.put(newId, newName);
}
// rewrite each condition task's conditionResult so that branch node names point at the renamed tasks
List<TaskNode> tasks = processData.getTasks();
for (int i = 0; i < tasks.size(); i++) {
TaskNode taskNode = tasks.get(i);
String type = taskNode.getType();
if (TaskType.CONDITIONS.getDescp().equalsIgnoreCase(type)) {
ConditionsParameters conditionsParameters = JSONUtils.parseObject(taskNode.getConditionResult(), ConditionsParameters.class);
String oldSuccessNodeName = conditionsParameters.getSuccessNode().get(0);
String oldFailedNodeName = conditionsParameters.getFailedNode().get(0);
String newSuccessNodeName = newNameTaskId.get(oldNameTaskId.get(oldSuccessNodeName));
String newFailedNodeName = newNameTaskId.get(oldNameTaskId.get(oldFailedNodeName));
if (newSuccessNodeName != null) {
ArrayList<String> successNode = new ArrayList<>();
successNode.add(newSuccessNodeName);
conditionsParameters.setSuccessNode(successNode);
}
if (newFailedNodeName != null) {
ArrayList<String> failedNode = new ArrayList<>();
failedNode.add(newFailedNodeName);
conditionsParameters.setFailedNode(failedNode);
}
String conditionResultStr = conditionsParameters.getConditionResult();
taskNode.setConditionResult(conditionResultStr);
tasks.set(i, taskNode);
}
}
return JSONUtils.toJsonString(processData);
}
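// Illustrative effect of changeJson (hypothetical data, for illustration only): if a branch
// task was renamed from "taskA" to "taskB" between saves, a CONDITIONS node whose
// successNode/failedNode lists still reference "taskA" is rewritten to reference "taskB",
// because both names resolve to the same task id via oldNameTaskId/newNameTaskId.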
}
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,881 | [Improvement][Docker] Add default login username and password in readme |
**Describe the bug**
Documentation for the docker-compose instructions is missing the default login credentials. (https://github.com/apache/incubator-dolphinscheduler/tree/dev/docker/build)
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Go to https://github.com/apache/incubator-dolphinscheduler/tree/dev/docker/build
**Expected behavior**
The default login username and password (admin / dolphinscheduler123) should be in the readme for the above location.
**Which version of Dolphin Scheduler:**
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4881 | https://github.com/apache/dolphinscheduler/pull/4882 | d217937de3c876c2f1d44cd05eeb266159111b1e | fbe0edac070df6322ce72b269255ff70f64c9c3d | "2021-02-26T03:33:09Z" | java | "2021-02-26T05:28:39Z" | docker/build/README.md | ## What is Dolphin Scheduler?
Dolphin Scheduler is a distributed, easy-to-extend visual DAG workflow scheduling system, dedicated to solving the complex dependencies in data processing and making the scheduling system work out of the box for data processing.
Github URL: https://github.com/apache/incubator-dolphinscheduler
Official Website: https://dolphinscheduler.apache.org
![Dolphin Scheduler](https://dolphinscheduler.apache.org/img/hlogo_colorful.svg)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
## How to use this docker image
#### You can start a dolphinscheduler by docker-compose (recommended)
```
$ docker-compose -f ./docker/docker-swarm/docker-compose.yml up -d
```
The default **postgres** user `root`, password `root` and database `dolphinscheduler` are created by `docker-compose.yml`.
A default **zookeeper** service is created by `docker-compose.yml` as well.
Access the Web UI: http://192.168.xx.xx:12345/dolphinscheduler
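The default login username and password are `admin` and `dolphinscheduler123`.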
#### Or via Environment Variables **`DATABASE_HOST`** **`DATABASE_PORT`** **`DATABASE_DATABASE`** **`ZOOKEEPER_QUORUM`**
You can point it at an **existing postgres and zookeeper service**. For example:
```
$ docker run -d --name dolphinscheduler \
-e ZOOKEEPER_QUORUM="192.168.x.x:2181" \
-e DATABASE_HOST="192.168.x.x" -e DATABASE_PORT="5432" -e DATABASE_DATABASE="dolphinscheduler" \
-e DATABASE_USERNAME="test" -e DATABASE_PASSWORD="test" \
-p 12345:12345 \
apache/dolphinscheduler:latest all
```
Access the Web UI: http://192.168.xx.xx:12345/dolphinscheduler
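Log in with the same default credentials as above (`admin` / `dolphinscheduler123`).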
#### Or start a standalone dolphinscheduler server
You can start a standalone dolphinscheduler server.
* Create a **local volume** for resource storage, for example:
```
docker volume create dolphinscheduler-resource-local
```
* Start a **master server**, for example:
```
$ docker run -d --name dolphinscheduler-master \
-e ZOOKEEPER_QUORUM="192.168.x.x:2181" \
-e DATABASE_HOST="192.168.x.x" -e DATABASE_PORT="5432" -e DATABASE_DATABASE="dolphinscheduler" \
-e DATABASE_USERNAME="test" -e DATABASE_PASSWORD="test" \
apache/dolphinscheduler:latest master-server
```
* Start a **worker server**, for example:
```
$ docker run -d --name dolphinscheduler-worker \
-e ZOOKEEPER_QUORUM="192.168.x.x:2181" \
-e DATABASE_HOST="192.168.x.x" -e DATABASE_PORT="5432" -e DATABASE_DATABASE="dolphinscheduler" \
-e DATABASE_USERNAME="test" -e DATABASE_PASSWORD="test" \
-e ALERT_LISTEN_HOST="dolphinscheduler-alert" \
-v dolphinscheduler-resource-local:/dolphinscheduler \
apache/dolphinscheduler:latest worker-server
```
* Start an **api server**, for example:
```
$ docker run -d --name dolphinscheduler-api \
-e ZOOKEEPER_QUORUM="192.168.x.x:2181" \
-e DATABASE_HOST="192.168.x.x" -e DATABASE_PORT="5432" -e DATABASE_DATABASE="dolphinscheduler" \
-e DATABASE_USERNAME="test" -e DATABASE_PASSWORD="test" \
-v dolphinscheduler-resource-local:/dolphinscheduler \
-p 12345:12345 \
apache/dolphinscheduler:latest api-server
```
* Start an **alert server**, for example:
```
$ docker run -d --name dolphinscheduler-alert \
-e DATABASE_HOST="192.168.x.x" -e DATABASE_PORT="5432" -e DATABASE_DATABASE="dolphinscheduler" \
-e DATABASE_USERNAME="test" -e DATABASE_PASSWORD="test" \
apache/dolphinscheduler:latest alert-server
```
**Note**: You must specify `DATABASE_HOST` `DATABASE_PORT` `DATABASE_DATABASE` `DATABASE_USERNAME` `DATABASE_PASSWORD` `ZOOKEEPER_QUORUM` when starting a standalone dolphinscheduler server.
## How to build a docker image
You can build a docker image on a Unix-like operating system, and you can also build it on Windows.
On Unix-like systems, for example:
```bash
$ cd path/incubator-dolphinscheduler
$ sh ./docker/build/hooks/build
```
On Windows, for example:
```bat
C:\incubator-dolphinscheduler>.\docker\build\hooks\build.bat
```
Please read the `./docker/build/hooks/build` and `./docker/build/hooks/build.bat` script files if anything in the build process is unclear.
## Environment Variables
The Dolphin Scheduler image uses several environment variables which are easy to miss. While none of the variables are required, they may significantly aid you in using the image.
**`DATABASE_TYPE`**
This environment variable sets the type for database. The default value is `postgresql`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_DRIVER`**
This environment variable sets the JDBC driver class for the database. The default value is `org.postgresql.Driver`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_HOST`**
This environment variable sets the host for database. The default value is `127.0.0.1`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_PORT`**
This environment variable sets the port for database. The default value is `5432`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_USERNAME`**
This environment variable sets the username for database. The default value is `root`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_PASSWORD`**
This environment variable sets the password for database. The default value is `root`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_DATABASE`**
This environment variable sets the database name. The default value is `dolphinscheduler`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DATABASE_PARAMS`**
This environment variable sets extra connection parameters for the database. The default value is `characterEncoding=utf8`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`, `api-server`, `alert-server`.
**`DOLPHINSCHEDULER_ENV_PATH`**
This environment variable sets the runtime environment file for tasks. The default value is `/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh`.
**`DOLPHINSCHEDULER_DATA_BASEDIR_PATH`**
This environment variable sets the user data directory path; please make sure the directory exists and has read/write permissions. The default value is `/tmp/dolphinscheduler`.
**`DOLPHINSCHEDULER_OPTS`**
This environment variable sets java options. The default value is empty.
**`RESOURCE_STORAGE_TYPE`**
This environment variable sets resource storage type for dolphinscheduler like `HDFS`, `S3`, `NONE`. The default value is `HDFS`.
**`RESOURCE_UPLOAD_PATH`**
This environment variable sets resource store path on HDFS/S3 for resource storage. The default value is `/dolphinscheduler`.
**`FS_DEFAULT_FS`**
This environment variable sets fs.defaultFS for resource storage like `file:///`, `hdfs://mycluster:8020` or `s3a://dolphinscheduler`. The default value is `file:///`.
**`FS_S3A_ENDPOINT`**
This environment variable sets s3 endpoint for resource storage. The default value is `s3.xxx.amazonaws.com`.
**`FS_S3A_ACCESS_KEY`**
This environment variable sets s3 access key for resource storage. The default value is `xxxxxxx`.
**`FS_S3A_SECRET_KEY`**
This environment variable sets s3 secret key for resource storage. The default value is `xxxxxxx`.
**`ZOOKEEPER_QUORUM`**
This environment variable sets zookeeper quorum for `master-server` and `worker-server`. The default value is `127.0.0.1:2181`.
**Note**: You must specify it when starting a standalone dolphinscheduler server, such as `master-server`, `worker-server`.
**`ZOOKEEPER_ROOT`**
This environment variable sets zookeeper root directory for dolphinscheduler. The default value is `/dolphinscheduler`.
**`MASTER_EXEC_THREADS`**
This environment variable sets exec thread num for `master-server`. The default value is `100`.
**`MASTER_EXEC_TASK_NUM`**
This environment variable sets exec task num for `master-server`. The default value is `20`.
**`MASTER_HEARTBEAT_INTERVAL`**
This environment variable sets heartbeat interval for `master-server`. The default value is `10`.
**`MASTER_TASK_COMMIT_RETRYTIMES`**
This environment variable sets task commit retry times for `master-server`. The default value is `5`.
**`MASTER_TASK_COMMIT_INTERVAL`**
This environment variable sets task commit interval for `master-server`. The default value is `1000`.
**`MASTER_MAX_CPULOAD_AVG`**
This environment variable sets max cpu load avg for `master-server`. The default value is `100`.
**`MASTER_RESERVED_MEMORY`**
This environment variable sets reserved memory for `master-server`. The default value is `0.1`.
**`MASTER_LISTEN_PORT`**
This environment variable sets port for `master-server`. The default value is `5678`.
**`WORKER_EXEC_THREADS`**
This environment variable sets exec thread num for `worker-server`. The default value is `100`.
**`WORKER_HEARTBEAT_INTERVAL`**
This environment variable sets heartbeat interval for `worker-server`. The default value is `10`.
**`WORKER_MAX_CPULOAD_AVG`**
This environment variable sets max cpu load avg for `worker-server`. The default value is `100`.
**`WORKER_RESERVED_MEMORY`**
This environment variable sets reserved memory for `worker-server`. The default value is `0.1`.
**`WORKER_LISTEN_PORT`**
This environment variable sets port for `worker-server`. The default value is `1234`.
**`WORKER_GROUPS`**
This environment variable sets group for `worker-server`. The default value is `default`.
**`WORKER_WEIGHT`**
This environment variable sets weight for `worker-server`. The default value is `100`.
**`ALERT_LISTEN_HOST`**
This environment variable sets the host of `alert-server` for `worker-server`. The default value is `127.0.0.1`.
**`ALERT_PLUGIN_DIR`**
This environment variable sets the alert plugin directory for `alert-server`. The default value is `lib/plugin/alert`.
## Initialization scripts
If you would like to do additional initialization in an image derived from this one, add one or more environment variables under `/root/startup-init-conf.sh`, and modify the template files in `/opt/dolphinscheduler/conf/*.tpl`.
For example, to add an environment variable `API_SERVER_PORT` in `/root/startup-init-conf.sh`:
```
export API_SERVER_PORT=5555
```
and to modify `/opt/dolphinscheduler/conf/application-api.properties.tpl` template file, add server port:
```
server.port=${API_SERVER_PORT}
```
`/root/startup-init-conf.sh` will dynamically generate the config files:
```sh
echo "generate app config"
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
done
```
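For example, you can verify a rendered config file inside a running container (a hypothetical check: the container name `dolphinscheduler-api` and the rendered file name are assumed from the example above):
```sh
docker exec dolphinscheduler-api grep "^server.port" /opt/dolphinscheduler/conf/application-api.properties
```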
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,891 | [Improvement][Readme] update DolphinScheduler slack invitation url | The Slack invitation URL has expired; please update the URL address in README.md and README_zh_CN.md
```
Join the slack channel by [this invitation link](https://join.slack.com/t/asf-dolphinscheduler/shared_invite/zt-mzqu52gi-rCggPkSHQ0DZYkwbTxO1Gw ).
```
Please use this new Slack invitation URL instead: https://join.slack.com/t/asf-dolphinscheduler/shared_invite/zt-omtdhuio-_JISsxYhiVsltmC5h38yfw
this invitation url never expires | https://github.com/apache/dolphinscheduler/issues/4891 | https://github.com/apache/dolphinscheduler/pull/4892 | c6b7eb34868a8d80cfd93f242564d17f8ba73bf5 | 5cadf4e4e920ed28a5e1d837d3e02505e7f13eee | "2021-02-27T01:01:13Z" | java | "2021-02-27T01:32:01Z" | README.md | Dolphin Scheduler Official Website
[dolphinscheduler.apache.org](https://dolphinscheduler.apache.org)
============
[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
[![Total Lines](https://tokei.rs/b1/github/apache/Incubator-DolphinScheduler?category=lines)](https://github.com/apache/Incubator-DolphinScheduler)
[![codecov](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev/graph/badge.svg)](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=apache-dolphinscheduler&metric=alert_status)](https://sonarcloud.io/dashboard?id=apache-dolphinscheduler)
[![Stargazers over time](https://starchart.cc/apache/incubator-dolphinscheduler.svg)](https://starchart.cc/apache/incubator-dolphinscheduler)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
### Design Features:
DolphinScheduler is a distributed and extensible workflow scheduler platform with powerful DAG visual interfaces, dedicated to solving complex job dependencies in the data pipeline and providing various types of jobs available `out of the box`.
Its main objectives are as follows:
- Associate the tasks according to the dependencies of the tasks in a DAG graph, which can visualize the running state of the task in real-time.
- Support various task types: Shell, MR, Spark, SQL (MySQL, PostgreSQL, hive, spark SQL), Python, Sub_Process, Procedure, etc.
- Support scheduling of workflows and dependencies, manual scheduling to pause/stop/recover task, support failure task retry/alarm, recover specified nodes from failure, kill task, etc.
- Support the priority of workflows & tasks, task failover, and task timeout alarm or failure.
- Support workflow global parameters and node customized parameter settings.
- Support online upload/download/management of resource files, etc. Support online file creation and editing.
- Support task log online viewing and scrolling and downloading, etc.
- Have implemented cluster HA, decentralize Master cluster and Worker cluster through Zookeeper.
- Support the viewing of Master/Worker CPU load, memory, and CPU usage metrics.
- Support displaying workflow history in tree/Gantt chart, as well as statistical analysis on the task status & process status in each workflow.
- Support back-filling data.
- Support multi-tenant.
- Support internationalization.
- More features waiting for partners to explore...
### What's in DolphinScheduler
Stability | Accessibility | Features | Scalability |
-- | -- | -- | --
Decentralized multi-master and multi-worker | Visualization of workflow key information, such as task status, task type, retry times, task operation machine information, visual variables, and so on at a glance. | Support pause, recover operation | Support customized task types
support HA | Visualization of all workflow operations, dragging tasks to draw DAGs, configuring data sources and resources. At the same time, for third-party systems, provide API mode operations. | Users on DolphinScheduler can achieve many-to-one or one-to-one mapping relationship through tenants and Hadoop users, which is very important for scheduling large data jobs. | The scheduler supports distributed scheduling, and the overall scheduling capability will increase linearly with the scale of the cluster. Master and Worker support dynamic adjustment.
Overload processing: By using the task queue mechanism, the number of schedulable tasks on a single machine can be flexibly configured. Machine jam can be avoided with high tolerance to numbers of tasks cached in task queue. | One-click deployment | Support traditional shell tasks, and big data platform task scheduling: MR, Spark, SQL (MySQL, PostgreSQL, hive, spark SQL), Python, Procedure, Sub_Process | |
### User Interface Screenshots
![home page](https://user-images.githubusercontent.com/15833811/75218288-bf286400-57d4-11ea-8263-d639c6511d5f.jpg)
![dag](https://user-images.githubusercontent.com/15833811/75236750-3374fe80-57f9-11ea-857d-62a66a5a559d.png)
![process definition list page](https://user-images.githubusercontent.com/15833811/75216886-6f479e00-57d0-11ea-92dd-66e7640a186f.png)
![view task log online](https://user-images.githubusercontent.com/15833811/75216924-9900c500-57d0-11ea-91dc-3522a76bdbbe.png)
![resource management](https://user-images.githubusercontent.com/15833811/75216984-be8dce80-57d0-11ea-840d-58546edc8788.png)
![monitor](https://user-images.githubusercontent.com/59273635/75625839-c698a480-5bfc-11ea-8bbe-895b561b337f.png)
![security](https://user-images.githubusercontent.com/15833811/75236441-bfd2f180-57f8-11ea-88bd-f24311e01b7e.png)
![treeview](https://user-images.githubusercontent.com/15833811/75217191-3fe56100-57d1-11ea-8856-f19180d9a879.png)
### QuickStart in Docker
Please refer to the official website document: [[QuickStart in Docker](https://dolphinscheduler.apache.org/en-us/docs/1.3.4/user_doc/docker-deployment.html)]
### How to Build
```bash
./mvnw clean install -Prelease
```
Artifact:
```
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-dolphinscheduler-bin.tar.gz: Binary package of DolphinScheduler
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-src.zip: Source code package of DolphinScheduler
```
### Thanks
DolphinScheduler is based on a lot of excellent open-source projects, such as google guava, guice, grpc, netty, ali bonecp, quartz, and many open-source projects of Apache and so on.
We would like to express our deep gratitude to all the open-source projects used in Dolphin Scheduler. We hope that we are not only the beneficiaries of open source, but also give back to the community. Besides, we hope everyone who has the same enthusiasm and passion for open source will join in and contribute to the open-source community!
### Get Help
1. Submit an [[issue](https://github.com/apache/incubator-dolphinscheduler/issues/new/choose)]
1. Subscribe to this mail list: https://dolphinscheduler.apache.org/en-us/community/development/subscribe.html, then email dev@dolphinscheduler.apache.org
### Community
You are warmly welcome to communicate with the developers and users of Dolphin Scheduler. There are two ways to find them:
1. Join the slack channel by [this invitation link](https://join.slack.com/t/asf-dolphinscheduler/shared_invite/zt-l8k90ceu-wwUfobaDkJxjzMfZp4y1Ag).
2. Follow the [twitter account of Dolphin Scheduler](https://twitter.com/dolphinschedule) and get the latest news just on time.
### How to Contribute
The community welcomes everyone to participate in contributing, please refer to this website to find out more: [[How to contribute](https://dolphinscheduler.apache.org/en-us/community/development/contribute.html)]
### License
Please refer to the [LICENSE](https://github.com/apache/incubator-dolphinscheduler/blob/dev/LICENSE) file.
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,891 | [Improvement][Readme] update DolphinScheduler slack invitation url | the slack invitation url has been expired, please update the url address in README.md and README_zh_CN.md
```
Join the slack channel by [this invitation link](https://join.slack.com/t/asf-dolphinscheduler/shared_invite/zt-mzqu52gi-rCggPkSHQ0DZYkwbTxO1Gw ).
```
please use this new slack invitation url: https://join.slack.com/t/asf-dolphinscheduler/shared_invite/zt-omtdhuio-_JISsxYhiVsltmC5h38yfw
this invitation url never expires | https://github.com/apache/dolphinscheduler/issues/4891 | https://github.com/apache/dolphinscheduler/pull/4892 | c6b7eb34868a8d80cfd93f242564d17f8ba73bf5 | 5cadf4e4e920ed28a5e1d837d3e02505e7f13eee | "2021-02-27T01:01:13Z" | java | "2021-02-27T01:32:01Z" | README_zh_CN.md | Dolphin Scheduler Official Website
[dolphinscheduler.apache.org](https://dolphinscheduler.apache.org)
============
[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
[![Total Lines](https://tokei.rs/b1/github/apache/Incubator-DolphinScheduler?category=lines)](https://github.com/apache/Incubator-DolphinScheduler)
[![codecov](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev/graph/badge.svg)](https://codecov.io/gh/apache/incubator-dolphinscheduler/branch/dev)
[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=apache-dolphinscheduler&metric=alert_status)](https://sonarcloud.io/dashboard?id=apache-dolphinscheduler)
> Dolphin Scheduler for Big Data
[![Stargazers over time](https://starchart.cc/apache/incubator-dolphinscheduler.svg)](https://starchart.cc/apache/incubator-dolphinscheduler)
[![CN doc](https://img.shields.io/badge/文档-中文版-blue.svg)](README_zh_CN.md)
[![EN doc](https://img.shields.io/badge/document-English-blue.svg)](README.md)
**Design features:** a distributed, easily extensible visual DAG workflow task scheduling system, dedicated to solving the tangled dependencies in data processing pipelines so that the scheduling system works `out of the box`.
Its main objectives are as follows:
- Link tasks by their dependencies as a DAG graph, with real-time visual monitoring of task running states
- Support rich task types: Shell, MR, Spark, SQL (mysql, postgresql, hive, sparksql), Python, Sub_Process, Procedure, etc.
- Support scheduled, dependency-based and manual workflow scheduling, with manual pause/stop/recover; also support failure retry/alerting, recovery from a specified failed node, killing tasks, and more
- Support workflow priorities, task priorities, task failover, and task timeout alerting/failure
- Support workflow global parameters and node-level custom parameter settings
- Support online upload/download/management of resource files, with online file creation and editing
- Support online viewing and tailing of task logs, and online log download
- Implement cluster HA, decentralizing the Master cluster and Worker cluster via Zookeeper
- Support online viewing of `Master/Worker` cpu load, memory and cpu usage
- Support tree/Gantt views of workflow run history, plus task state and process state statistics
- Support data backfilling
- Support multi-tenancy
- Support internationalization
- And more features waiting for partners to explore
### Partial system screenshots
![home page](https://user-images.githubusercontent.com/15833811/75208819-abbad000-57b7-11ea-8d3c-67e7c270671f.jpg)
![dag](https://user-images.githubusercontent.com/15833811/75209584-93e44b80-57b9-11ea-952e-537fb24ec72d.jpg)
![log](https://user-images.githubusercontent.com/15833811/75209645-c55d1700-57b9-11ea-94d4-e3fa91ab5218.jpg)
![gantt](https://user-images.githubusercontent.com/15833811/75209640-c0986300-57b9-11ea-878e-a2098533ad44.jpg)
![resources](https://user-images.githubusercontent.com/15833811/75209403-11f42280-57b9-11ea-9b59-d4be77063553.jpg)
![monitor](https://user-images.githubusercontent.com/15833811/75209631-b5ddce00-57b9-11ea-8d22-cdf15cf0ee25.jpg)
![security](https://user-images.githubusercontent.com/15833811/75209633-baa28200-57b9-11ea-9def-94bef2e212a7.jpg)
### Recent development plan
DolphinScheduler's work plan: <a href="https://github.com/apache/incubator-dolphinscheduler/projects/1" target="_blank">development plan</a>, where the cards under "In Develop" are features under development and the "TODO" cards are pending items (including feature ideas)
### How to Contribute
Everyone is very welcome to contribute; for the contribution process please refer to:
[[How to contribute](https://dolphinscheduler.apache.org/zh-cn/docs/development/contribute.html)]
### How to Build
```bash
./mvnw clean install -Prelease
```
Artifact:
```
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-dolphinscheduler-bin.tar.gz: Binary package of DolphinScheduler
dolphinscheduler-dist/target/apache-dolphinscheduler-incubating-${latest.release.version}-src.zip: Source code package of DolphinScheduler
```
### Thanks
Dolphin Scheduler uses many excellent open-source projects, such as google's guava, guice, grpc, netty, ali's bonecp, quartz, and many open-source projects of Apache,
and it is only by standing on the shoulders of these open-source projects that Dolphin Scheduler could come into being. For this we are deeply grateful to all the open-source software we use! We also hope to be not just beneficiaries of open source but contributors to it, and we hope partners who share the same enthusiasm and conviction about open source will join in and contribute to the open-source community!
### Get Help
1. Submit an issue
2. Subscribe to the dev mailing list first: [subscribe to the mailing list](https://dolphinscheduler.apache.org/zh-cn/community/development/subscribe.html), and after subscribing successfully send mail to dev@dolphinscheduler.apache.org.
### Community
1. Join the slack channel via [this invitation link](https://join.slack.com/t/asf-dolphinscheduler/shared_invite/zt-l8k90ceu-wwUfobaDkJxjzMfZp4y1Ag)
2. Follow the [Apache Dolphin Scheduler Twitter account](https://twitter.com/dolphinschedule) for real-time updates
### License
Please refer to the [LICENSE](https://github.com/apache/incubator-dolphinscheduler/blob/dev/LICENSE) file.
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,886 | [Bug][Docker] Cannot create container for service dolphinscheduler-worker: invalid volume specification in Windows |
**Describe the bug**
Running `docker-compose` on Windows fails to create the `dolphinscheduler-worker` container with an "invalid volume specification" error (see the steps and screenshot below).
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose` on Windows
2. See error: `Cannot create container for service dolphinscheduler-worker: invalid volume specification`
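A commonly suggested workaround on Windows (an assumption for illustration only, not necessarily the fix applied in PR #4887) is to let docker-compose convert Windows-style paths before it parses volume specifications:
```
set COMPOSE_CONVERT_WINDOWS_PATHS=1
docker-compose -f ./docker/docker-swarm/docker-compose.yml up -d
```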
**Expected behavior**
Bug fixed.
**Screenshots**
![image](https://user-images.githubusercontent.com/4902714/109300088-0e362880-7871-11eb-9132-f716462a26eb.png)
**Which version of Dolphin Scheduler:**
-[1.3.x]
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4886 | https://github.com/apache/dolphinscheduler/pull/4887 | a75793d875a9a7c3394dc8768c014e1d4efa6e7f | b8788f007b1617605e05d70c0ff33c4aa7318c4b | "2021-02-26T12:27:50Z" | java | "2021-02-27T01:45:23Z" | docker/build/Dockerfile | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
FROM openjdk:8-jdk-alpine
ARG VERSION
ENV TZ Asia/Shanghai
ENV LANG C.UTF-8
ENV DOCKER true
# 1. install command/library/software
# If installs are slow, you can replace alpine's default mirror with aliyun's mirror, for example:
# RUN sed -i "s/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g" /etc/apk/repositories
# RUN sed -i 's/dl-cdn.alpinelinux.org/mirror.tuna.tsinghua.edu.cn/g' /etc/apk/repositories
RUN apk update && \
apk add --no-cache tzdata dos2unix bash python2 python3 procps sudo shadow tini postgresql-client && \
cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && \
apk del tzdata && \
rm -rf /var/cache/apk/*
# 2. add dolphinscheduler
ADD ./apache-dolphinscheduler-incubating-${VERSION}-dolphinscheduler-bin.tar.gz /opt/
RUN ln -s /opt/apache-dolphinscheduler-incubating-${VERSION}-dolphinscheduler-bin /opt/dolphinscheduler
ENV DOLPHINSCHEDULER_HOME /opt/dolphinscheduler
# 3. add configuration and modify permissions and set soft links
COPY ./checkpoint.sh /root/checkpoint.sh
COPY ./startup-init-conf.sh /root/startup-init-conf.sh
COPY ./startup.sh /root/startup.sh
COPY ./conf/dolphinscheduler/*.tpl /opt/dolphinscheduler/conf/
COPY ./conf/dolphinscheduler/logback/* /opt/dolphinscheduler/conf/
COPY ./conf/dolphinscheduler/env/dolphinscheduler_env.sh /opt/dolphinscheduler/conf/env/
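# Note: the RUN below normalizes CRLF line endings with dos2unix, points /bin/sh at bash
# (the bundled scripts rely on bash features), creates the mail/xls work directories, and
# appends "Set disable_coredump false" so sudo does not warn about setrlimit on this base.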
RUN dos2unix /root/checkpoint.sh && \
dos2unix /root/startup-init-conf.sh && \
dos2unix /root/startup.sh && \
dos2unix /opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh && \
dos2unix /opt/dolphinscheduler/script/*.sh && \
dos2unix /opt/dolphinscheduler/bin/*.sh && \
rm -rf /bin/sh && \
ln -s /bin/bash /bin/sh && \
mkdir -p /var/mail /tmp/xls && \
echo "Set disable_coredump false" >> /etc/sudo.conf
# 4. expose port
EXPOSE 5678 1234 12345 50051 50052
ENTRYPOINT ["/sbin/tini", "--", "/root/startup.sh"]
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,886 | [Bug][Docker] Cannot create container for service dolphinscheduler-worker: invalid volume specification in Windows |
**Describe the bug**
Running `docker-compose` on Windows fails to create the `dolphinscheduler-worker` container with an "invalid volume specification" error (see the steps and screenshot below).
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose` on Windows
2. See error: `Cannot create container for service dolphinscheduler-worker: invalid volume specification`
**Expected behavior**
Bug fixed.
**Screenshots**
![image](https://user-images.githubusercontent.com/4902714/109300088-0e362880-7871-11eb-9132-f716462a26eb.png)
**Which version of Dolphin Scheduler:**
-[1.3.x]
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4886 | https://github.com/apache/dolphinscheduler/pull/4887 | a75793d875a9a7c3394dc8768c014e1d4efa6e7f | b8788f007b1617605e05d70c0ff33c4aa7318c4b | "2021-02-26T12:27:50Z" | java | "2021-02-27T01:45:23Z" | docker/build/conf/dolphinscheduler/common.properties.tpl | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# resource storage type : HDFS, S3, NONE
resource.storage.type=${RESOURCE_STORAGE_TYPE}
# resource storage path on HDFS/S3; resource files will be stored under this hadoop hdfs path. Please make sure the directory exists on hdfs and has read/write permissions. "/dolphinscheduler" is recommended
resource.upload.path=${RESOURCE_UPLOAD_PATH}
# user data local directory path; please make sure the directory exists and has read/write permissions
data.basedir.path=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH}
# whether kerberos starts
hadoop.security.authentication.startup.state=false
# java.security.krb5.conf path
java.security.krb5.conf.path=/opt/krb5.conf
# login user from keytab username
login.user.keytab.username=hdfs-mycluster@ESZ.COM
# login user from keytab path
login.user.keytab.path=/opt/hdfs.headless.keytab
#resource.view.suffixs
#resource.view.suffixs=txt,log,sh,bat,conf,cfg,py,java,sql,xml,hql,properties,json,yml,yaml,ini,js
# if resource.storage.type=HDFS, the user needs permission to create directories under the HDFS root path
hdfs.root.user=hdfs
# if resource.storage.type=S3, the value like: s3a://dolphinscheduler; if resource.storage.type=HDFS, When namenode HA is enabled, you need to copy core-site.xml and hdfs-site.xml to conf dir
fs.defaultFS=${FS_DEFAULT_FS}
# if resource.storage.type=S3, s3 endpoint
fs.s3a.endpoint=${FS_S3A_ENDPOINT}
# if resource.storage.type=S3, s3 access key
fs.s3a.access.key=${FS_S3A_ACCESS_KEY}
# if resource.storage.type=S3, s3 secret key
fs.s3a.secret.key=${FS_S3A_SECRET_KEY}
# if resourcemanager HA is enabled, list the HA ips; if there is a single resourcemanager, make this value empty
yarn.resourcemanager.ha.rm.ids=192.168.xx.xx,192.168.xx.xx
# if resourcemanager HA is enabled or no resourcemanager is used, keep the default value; if there is a single resourcemanager, you only need to replace ds1 with the actual resourcemanager hostname.
yarn.application.status.address=http://ds1:8088/ws/v1/cluster/apps/%s
# job history status url when the application number threshold is reached (default 10000, may have been set to 1000)
yarn.job.history.status.address=http://ds1:19888/ws/v1/history/mapreduce/jobs/%s
# system env path; if you want to set your own path, you need to set this env file to an absolute path
dolphinscheduler.env.path=${DOLPHINSCHEDULER_ENV_PATH}
development.state=false
# kerberos tgt expire time, unit is hours
kerberos.expire.time=2
# datasource encryption salt
datasource.encryption.enable=false
datasource.encryption.salt=!@#$%^&*
# Network IP gets priority, default inner outer
#dolphin.scheduler.network.priority.strategy=default
|
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,886 | [Bug][Docker] Cannot create container for service dolphinscheduler-worker: invalid volume specification in Windows |
**Describe the bug**
Running `docker-compose` on Windows fails to create the `dolphinscheduler-worker` container with an "invalid volume specification" error (see the steps and screenshot below).
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose` on Windows
2. See error: `Cannot create container for service dolphinscheduler-worker: invalid volume specification`
**Expected behavior**
Bug fixed.
**Screenshots**
![image](https://user-images.githubusercontent.com/4902714/109300088-0e362880-7871-11eb-9132-f716462a26eb.png)
**Which version of Dolphin Scheduler:**
-[1.3.x]
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4886 | https://github.com/apache/dolphinscheduler/pull/4887 | a75793d875a9a7c3394dc8768c014e1d4efa6e7f | b8788f007b1617605e05d70c0ff33c4aa7318c4b | "2021-02-26T12:27:50Z" | java | "2021-02-27T01:45:23Z" | docker/build/conf/dolphinscheduler/env/dolphinscheduler_env.sh.tpl | |
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,886 | [Bug][Docker] Cannot create container for service dolphinscheduler-worker: invalid volume specification in Windows |
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior, for example:
1. Run `docker-compose` on Windows
2. See error: `Cannot create container for service dolphinscheduler-worker: invalid volume specification`
**Expected behavior**
Bug fixed.
**Screenshots**
If applicable, add screenshots to help explain your problem.
![image](https://user-images.githubusercontent.com/4902714/109300088-0e362880-7871-11eb-9132-f716462a26eb.png)
**Which version of Dolphin Scheduler:**
-[1.3.x]
-[dev]
**Additional context**
Add any other context about the problem here.
**Requirement or improvement**
- Please describe about your requirements or improvement suggestions.
| https://github.com/apache/dolphinscheduler/issues/4886 | https://github.com/apache/dolphinscheduler/pull/4887 | a75793d875a9a7c3394dc8768c014e1d4efa6e7f | b8788f007b1617605e05d70c0ff33c4aa7318c4b | "2021-02-26T12:27:50Z" | java | "2021-02-27T01:45:23Z" | docker/build/startup-init-conf.sh | #!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
echo "init env variables"
# Define parameters default value
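# Each export below uses the shell's ${VAR:-default} expansion: if VAR is already set
# and non-empty, its current value is kept; otherwise the given default is used.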
#============================================================================
# Database Source
#============================================================================
export DATABASE_HOST=${DATABASE_HOST:-"127.0.0.1"}
export DATABASE_PORT=${DATABASE_PORT:-"5432"}
export DATABASE_USERNAME=${DATABASE_USERNAME:-"root"}
export DATABASE_PASSWORD=${DATABASE_PASSWORD:-"root"}
export DATABASE_DATABASE=${DATABASE_DATABASE:-"dolphinscheduler"}
export DATABASE_TYPE=${DATABASE_TYPE:-"postgresql"}
export DATABASE_DRIVER=${DATABASE_DRIVER:-"org.postgresql.Driver"}
export DATABASE_PARAMS=${DATABASE_PARAMS:-"characterEncoding=utf8"}
#============================================================================
# Common
#============================================================================
export DOLPHINSCHEDULER_ENV_PATH=${DOLPHINSCHEDULER_ENV_PATH:-"/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh"}
export DOLPHINSCHEDULER_DATA_BASEDIR_PATH=${DOLPHINSCHEDULER_DATA_BASEDIR_PATH:-"/tmp/dolphinscheduler"}
export DOLPHINSCHEDULER_OPTS=${DOLPHINSCHEDULER_OPTS:-""}
export RESOURCE_STORAGE_TYPE=${RESOURCE_STORAGE_TYPE:-"HDFS"}
export RESOURCE_UPLOAD_PATH=${RESOURCE_UPLOAD_PATH:-"/dolphinscheduler"}
export FS_DEFAULT_FS=${FS_DEFAULT_FS:-"file:///"}
export FS_S3A_ENDPOINT=${FS_S3A_ENDPOINT:-"s3.xxx.amazonaws.com"}
export FS_S3A_ACCESS_KEY=${FS_S3A_ACCESS_KEY:-"xxxxxxx"}
export FS_S3A_SECRET_KEY=${FS_S3A_SECRET_KEY:-"xxxxxxx"}
#============================================================================
# Zookeeper
#============================================================================
export ZOOKEEPER_QUORUM=${ZOOKEEPER_QUORUM:-"127.0.0.1:2181"}
export ZOOKEEPER_ROOT=${ZOOKEEPER_ROOT:-"/dolphinscheduler"}
#============================================================================
# Master Server
#============================================================================
export MASTER_EXEC_THREADS=${MASTER_EXEC_THREADS:-"100"}
export MASTER_EXEC_TASK_NUM=${MASTER_EXEC_TASK_NUM:-"20"}
export MASTER_HEARTBEAT_INTERVAL=${MASTER_HEARTBEAT_INTERVAL:-"10"}
export MASTER_TASK_COMMIT_RETRYTIMES=${MASTER_TASK_COMMIT_RETRYTIMES:-"5"}
export MASTER_TASK_COMMIT_INTERVAL=${MASTER_TASK_COMMIT_INTERVAL:-"1000"}
export MASTER_MAX_CPULOAD_AVG=${MASTER_MAX_CPULOAD_AVG:-"100"}
export MASTER_RESERVED_MEMORY=${MASTER_RESERVED_MEMORY:-"0.1"}
export MASTER_LISTEN_PORT=${MASTER_LISTEN_PORT:-"5678"}
#============================================================================
# Worker Server
#============================================================================
export WORKER_EXEC_THREADS=${WORKER_EXEC_THREADS:-"100"}
export WORKER_HEARTBEAT_INTERVAL=${WORKER_HEARTBEAT_INTERVAL:-"10"}
export WORKER_MAX_CPULOAD_AVG=${WORKER_MAX_CPULOAD_AVG:-"100"}
export WORKER_RESERVED_MEMORY=${WORKER_RESERVED_MEMORY:-"0.1"}
export WORKER_LISTEN_PORT=${WORKER_LISTEN_PORT:-"1234"}
export WORKER_GROUPS=${WORKER_GROUPS:-"default"}
export WORKER_WEIGHT=${WORKER_WEIGHT:-"100"}
export ALERT_LISTEN_HOST=${ALERT_LISTEN_HOST:-"127.0.0.1"}
#============================================================================
# Alert Server
#============================================================================
export ALERT_PLUGIN_DIR=${ALERT_PLUGIN_DIR:-"lib/plugin/alert"}
echo "generate app config"
ls ${DOLPHINSCHEDULER_HOME}/conf/ | grep ".tpl" | while read line; do
eval "cat << EOF
$(cat ${DOLPHINSCHEDULER_HOME}/conf/${line})
EOF
" > ${DOLPHINSCHEDULER_HOME}/conf/${line%.*}
done
|
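For reference, the template-rendering trick in `startup-init-conf.sh` above can be exercised in isolation. Everything in this sketch (the `/tmp/conf` directory, `demo.cfg.tpl`, and the variable value) is hypothetical and exists only to illustrate the mechanism.

```bash
#!/bin/bash
# Self-contained demo of the eval + heredoc templating used above.
export DATABASE_HOST=postgres.internal
mkdir -p /tmp/conf
printf 'db.host=${DATABASE_HOST}\n' > /tmp/conf/demo.cfg.tpl

line=demo.cfg.tpl
# Command substitution splices the raw template text into the eval string;
# eval then re-parses it as a heredoc, expanding ${DATABASE_HOST}.
eval "cat << EOF
$(cat /tmp/conf/${line})
EOF
" > /tmp/conf/${line%.*}

cat /tmp/conf/demo.cfg   # prints: db.host=postgres.internal
```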
closed | apache/dolphinscheduler | https://github.com/apache/dolphinscheduler | 4,886 | [Bug][Docker] Cannot create container for service dolphinscheduler-worker: invalid volume specification in Windows | **For better global communication, please describe it in English. If you feel the description in English is not clear, you can append a description in Chinese (just for Mandarin (CN)), thanks!**
**Describe the bug**

Running `docker-compose` on Windows fails to create the container for the `dolphinscheduler-worker` service: Docker rejects one of the volume specifications in `docker/docker-swarm/docker-compose.yml` as invalid.

**To Reproduce**

Steps to reproduce the behavior:
1. Run `docker-compose up` on Windows
2. See the error: `Cannot create container for service dolphinscheduler-worker: invalid volume specification`

**Expected behavior**

All services, including `dolphinscheduler-worker`, are created and started successfully on Windows.

**Screenshots**

![image](https://user-images.githubusercontent.com/4902714/109300088-0e362880-7871-11eb-9132-f716462a26eb.png)

**Which version of Dolphin Scheduler:**
-[1.3.x]
-[dev]
| https://github.com/apache/dolphinscheduler/issues/4886 | https://github.com/apache/dolphinscheduler/pull/4887 | a75793d875a9a7c3394dc8768c014e1d4efa6e7f | b8788f007b1617605e05d70c0ff33c4aa7318c4b | "2021-02-26T12:27:50Z" | java | "2021-02-27T01:45:23Z" | docker/docker-swarm/docker-compose.yml | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version: "3.4"
services:
dolphinscheduler-postgresql:
image: bitnami/postgresql:latest
container_name: dolphinscheduler-postgresql
ports:
- 5432:5432
environment:
TZ: Asia/Shanghai
POSTGRESQL_USERNAME: root
POSTGRESQL_PASSWORD: root
POSTGRESQL_DATABASE: dolphinscheduler
volumes:
- dolphinscheduler-postgresql:/bitnami/postgresql
- dolphinscheduler-postgresql-initdb:/docker-entrypoint-initdb.d
restart: unless-stopped
networks:
- dolphinscheduler
dolphinscheduler-zookeeper:
image: bitnami/zookeeper:latest
container_name: dolphinscheduler-zookeeper
ports:
- 2181:2181
environment:
TZ: Asia/Shanghai
ALLOW_ANONYMOUS_LOGIN: "yes"
ZOO_4LW_COMMANDS_WHITELIST: srvr,ruok,wchs,cons
volumes:
- dolphinscheduler-zookeeper:/bitnami/zookeeper
restart: unless-stopped
networks:
- dolphinscheduler
dolphinscheduler-api:
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-api
command: api-server
ports:
- 12345:12345
environment:
TZ: Asia/Shanghai
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432
DATABASE_USERNAME: root
DATABASE_PASSWORD: root
DATABASE_DATABASE: dolphinscheduler
ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181
RESOURCE_STORAGE_TYPE: HDFS
RESOURCE_UPLOAD_PATH: /dolphinscheduler
FS_DEFAULT_FS: file:///
healthcheck:
test: ["CMD", "/root/checkpoint.sh", "ApiApplicationServer"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
- dolphinscheduler-resource-local:/dolphinscheduler
restart: unless-stopped
networks:
- dolphinscheduler
dolphinscheduler-alert:
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-alert
command: alert-server
ports:
- 50052:50052
environment:
TZ: Asia/Shanghai
ALERT_PLUGIN_DIR: lib/plugin/alert
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432
DATABASE_USERNAME: root
DATABASE_PASSWORD: root
DATABASE_DATABASE: dolphinscheduler
healthcheck:
test: ["CMD", "/root/checkpoint.sh", "AlertServer"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
volumes:
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
restart: unless-stopped
networks:
- dolphinscheduler
dolphinscheduler-master:
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-master
command: master-server
ports:
- 5678:5678
environment:
TZ: Asia/Shanghai
MASTER_EXEC_THREADS: "100"
MASTER_EXEC_TASK_NUM: "20"
MASTER_HEARTBEAT_INTERVAL: "10"
MASTER_TASK_COMMIT_RETRYTIMES: "5"
MASTER_TASK_COMMIT_INTERVAL: "1000"
MASTER_MAX_CPULOAD_AVG: "100"
MASTER_RESERVED_MEMORY: "0.1"
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432
DATABASE_USERNAME: root
DATABASE_PASSWORD: root
DATABASE_DATABASE: dolphinscheduler
ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181
healthcheck:
test: ["CMD", "/root/checkpoint.sh", "MasterServer"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
restart: unless-stopped
networks:
- dolphinscheduler
dolphinscheduler-worker:
image: apache/dolphinscheduler:latest
container_name: dolphinscheduler-worker
command: worker-server
ports:
- 1234:1234
- 50051:50051
environment:
TZ: Asia/Shanghai
WORKER_EXEC_THREADS: "100"
WORKER_HEARTBEAT_INTERVAL: "10"
WORKER_MAX_CPULOAD_AVG: "100"
WORKER_RESERVED_MEMORY: "0.1"
WORKER_GROUPS: "default"
WORKER_WEIGHT: "100"
DOLPHINSCHEDULER_DATA_BASEDIR_PATH: /tmp/dolphinscheduler
ALERT_LISTEN_HOST: dolphinscheduler-alert
DATABASE_HOST: dolphinscheduler-postgresql
DATABASE_PORT: 5432
DATABASE_USERNAME: root
DATABASE_PASSWORD: root
DATABASE_DATABASE: dolphinscheduler
ZOOKEEPER_QUORUM: dolphinscheduler-zookeeper:2181
RESOURCE_STORAGE_TYPE: HDFS
RESOURCE_UPLOAD_PATH: /dolphinscheduler
FS_DEFAULT_FS: file:///
healthcheck:
test: ["CMD", "/root/checkpoint.sh", "WorkerServer"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
depends_on:
- dolphinscheduler-postgresql
- dolphinscheduler-zookeeper
volumes:
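      # NOTE: the relative host-path bind mount below is the volume
      # specification that Docker on Windows rejects (issue #4886); the
      # linked pull request (#4887) reworks how dolphinscheduler_env.sh
      # is supplied to the container.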
- ./dolphinscheduler_env.sh:/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh
- dolphinscheduler-worker-data:/tmp/dolphinscheduler
- dolphinscheduler-logs:/opt/dolphinscheduler/logs
- dolphinscheduler-resource-local:/dolphinscheduler
restart: unless-stopped
networks:
- dolphinscheduler
networks:
dolphinscheduler:
driver: bridge
volumes:
dolphinscheduler-postgresql:
dolphinscheduler-postgresql-initdb:
dolphinscheduler-zookeeper:
dolphinscheduler-worker-data:
dolphinscheduler-logs:
dolphinscheduler-resource-local: |
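The empty before-fix `dolphinscheduler_env.sh.tpl` in the first record suggests the linked fix stops bind-mounting the env file from the host and instead renders it inside the image. For illustration only, a Windows-tolerant variant of the worker's `volumes:` section might look like the sketches below; both are assumptions, not the project's actual change, and the host path in the second is hypothetical.

```yaml
# Sketch A (assumption): drop the relative bind mount entirely and rely on
# the env file rendered from a template by startup-init-conf.sh.
    volumes:
      - dolphinscheduler-worker-data:/tmp/dolphinscheduler
      - dolphinscheduler-logs:/opt/dolphinscheduler/logs
      - dolphinscheduler-resource-local:/dolphinscheduler

# Sketch B (assumption): keep the mount but give Docker Desktop an absolute,
# forward-slash path it can parse (hypothetical location; drive must be shared).
    volumes:
      - //c/dolphinscheduler/dolphinscheduler_env.sh:/opt/dolphinscheduler/conf/env/dolphinscheduler_env.sh
      - dolphinscheduler-worker-data:/tmp/dolphinscheduler
      - dolphinscheduler-logs:/opt/dolphinscheduler/logs
      - dolphinscheduler-resource-local:/dolphinscheduler
```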